index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/airavata-sandbox/grid-tools/gridftp-client/src/main/java/org/apache/airavata/filetransfer | Create_ds/airavata-sandbox/grid-tools/gridftp-client/src/main/java/org/apache/airavata/filetransfer/utils/GridFtp.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.filetransfer.utils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.log4j.Logger;
import org.globus.ftp.DataChannelAuthentication;
import org.globus.ftp.DataSourceStream;
import org.globus.ftp.GridFTPClient;
import org.globus.ftp.HostPort;
import org.globus.ftp.Marker;
import org.globus.ftp.MarkerListener;
import org.globus.ftp.Session;
import org.globus.ftp.exception.ClientException;
import org.globus.ftp.exception.ServerException;
import org.globus.gsi.gssapi.auth.HostAuthorization;
import org.ietf.jgss.GSSCredential;
/**
* GridFTP tools
*/
public class GridFtp {
public static final String GSIFTP_SCHEME = "gsiftp";
private static final Logger log = Logger.getLogger(GridFtp.class);
/**
 * Creates a directory at a remote GridFTP location, retrying transient failures.
 *
 * <p>The operation is attempted up to {@code MAX_ATTEMPTS} times; between attempts the
 * thread sleeps for {@code RETRY_DELAY_MILLIS}. The directory is only created when it
 * does not already exist.
 *
 * @param destURI directory path and server location at which to create the directory.
 * @param gssCred credentials used to authenticate with the remote server.
 * @throws Exception if the client cannot be created, authentication fails after all
 *                   retries, or the thread is interrupted while waiting to retry.
 */
public void makeDir(URI destURI, GSSCredential gssCred) throws Exception {
    // Retry policy for transient server/IO failures.
    final int MAX_ATTEMPTS = 3;
    final long RETRY_DELAY_MILLIS = 10000;
    GridFTPClient destClient = null;
    GridFTPContactInfo destHost = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
    try {
        String destPath = destURI.getPath();
        log.info("Creating Directory = " + destHost + "=" + destPath);
        destClient = new GridFTPClient(destHost.hostName, destHost.port);
        int tryCount = 0;
        while (true) {
            try {
                destClient.setAuthorization(new HostAuthorization("host"));
                destClient.authenticate(gssCred);
                destClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
                if (!destClient.exists(destPath)) {
                    destClient.makeDir(destPath);
                }
                break;
            } catch (ServerException e) {
                tryCount++;
                if (tryCount >= MAX_ATTEMPTS) {
                    throw new Exception(e.getMessage(), e);
                }
                Thread.sleep(RETRY_DELAY_MILLIS);
            } catch (IOException e) {
                tryCount++;
                if (tryCount >= MAX_ATTEMPTS) {
                    throw new Exception(e.getMessage(), e);
                }
                Thread.sleep(RETRY_DELAY_MILLIS);
            }
        }
    } catch (ServerException e) {
        throw new Exception("Cannot Create GridFTP Client to:" + destHost.toString(), e);
    } catch (IOException e) {
        throw new Exception("Cannot Create GridFTP Client to:" + destHost.toString(), e);
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers can still observe the interruption.
        Thread.currentThread().interrupt();
        throw new Exception("Internal Error cannot sleep", e);
    } finally {
        if (destClient != null) {
            try {
                destClient.close();
            } catch (Exception e) {
                log.info("Cannot close GridFTP client connection");
            }
        }
    }
}
/**
 * Uploads data read from a stream to a remote GridFTP location.
 *
 * @param destURI      destination file name and server location.
 * @param gsCredential credentials used to authenticate with the remote server.
 * @param io           stream supplying the bytes to upload; not closed by this method.
 * @throws Exception if authentication or the transfer fails.
 */
public void updateFile(URI destURI, GSSCredential gsCredential, InputStream io) throws Exception {
    GridFTPContactInfo contact = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
    GridFTPClient client = null;
    try {
        String remotePath = destURI.getPath();
        log.info("The remote file is " + remotePath);
        log.info("Setup GridFTP Client");
        client = new GridFTPClient(contact.hostName, contact.port);
        client.setAuthorization(new HostAuthorization("host"));
        client.authenticate(gsCredential);
        client.setDataChannelAuthentication(DataChannelAuthentication.SELF);
        log.info("Uploading file");
        // Progress markers from the server are intentionally ignored.
        MarkerListener ignoreMarkers = new MarkerListener() {
            public void markerArrived(Marker marker) {
            }
        };
        client.put(remotePath, new DataSourceStream(io), ignoreMarkers);
        log.info("Upload file to:" + remotePath + " is done");
    } catch (ServerException e) {
        throw new Exception("Cannot upload file to GridFTP:" + contact.toString(), e);
    } catch (IOException e) {
        throw new Exception("Cannot upload file to GridFTP:" + contact.toString(), e);
    } catch (ClientException e) {
        throw new Exception("Cannot upload file to GridFTP:" + contact.toString(), e);
    } finally {
        if (client != null) {
            try {
                client.close();
            } catch (Exception e) {
                log.info("Cannot close GridFTP client connection");
            }
        }
    }
}
/**
 * Uploads a local file to a remote GridFTP location.
 *
 * @param destURI      destination file name and server location.
 * @param gsCredential credentials used to authenticate with the remote server.
 * @param localFile    local file whose contents are uploaded.
 * @throws Exception if authentication or the transfer fails.
 */
public void updateFile(URI destURI, GSSCredential gsCredential, File localFile) throws Exception {
    GridFTPContactInfo contact = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
    GridFTPClient client = null;
    try {
        String remotePath = destURI.getPath();
        log.info("The local temp file is " + localFile);
        log.info("the remote file is " + remotePath);
        log.info("Setup GridFTP Client");
        client = new GridFTPClient(contact.hostName, contact.port);
        client.setAuthorization(new HostAuthorization("host"));
        client.authenticate(gsCredential);
        client.setDataChannelAuthentication(DataChannelAuthentication.SELF);
        log.info("Uploading file");
        // Third argument = append flag; false overwrites the remote file.
        client.put(localFile, remotePath, false);
        log.info("Upload file to:" + remotePath + " is done");
    } catch (ServerException e) {
        throw new Exception("Cannot upload file to GridFTP:" + contact.toString(), e);
    } catch (IOException e) {
        throw new Exception("Cannot upload file to GridFTP:" + contact.toString(), e);
    } catch (ClientException e) {
        throw new Exception("Cannot upload file to GridFTP:" + contact.toString(), e);
    } finally {
        if (client != null) {
            try {
                client.close();
            } catch (Exception e) {
                log.info("Cannot close GridFTP client connection");
            }
        }
    }
}
/**
 * Downloads a file from a remote GridFTP location into a local file.
 *
 * @param destURI      remote file name and server location to fetch.
 * @param gsCredential credentials used to authenticate with the remote server.
 * @param localFile    local destination for the downloaded content.
 * @throws Exception if authentication or the transfer fails.
 */
public void downloadFile(URI destURI, GSSCredential gsCredential, File localFile) throws Exception {
    GridFTPContactInfo contact = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
    GridFTPClient client = null;
    try {
        String remotePath = destURI.getPath();
        log.info("The local temp file is " + localFile);
        log.info("the remote file is " + remotePath);
        log.info("Setup GridFTP Client");
        client = new GridFTPClient(contact.hostName, contact.port);
        client.setAuthorization(new HostAuthorization("host"));
        client.authenticate(gsCredential);
        client.setDataChannelAuthentication(DataChannelAuthentication.SELF);
        log.info("Downloading file");
        client.get(remotePath, localFile);
        log.info("Download file to:" + remotePath + " is done");
    } catch (ServerException e) {
        throw new Exception("Cannot download file from GridFTP:" + contact.toString(), e);
    } catch (IOException e) {
        throw new Exception("Cannot download file from GridFTP:" + contact.toString(), e);
    } catch (ClientException e) {
        throw new Exception("Cannot download file from GridFTP:" + contact.toString(), e);
    } finally {
        if (client != null) {
            try {
                client.close();
            } catch (Exception e) {
                log.info("Cannot close GridFTP client connection");
            }
        }
    }
}
/**
 * Checks whether a file exists at a remote GridFTP location.
 *
 * @param destURI      file name and server location to check.
 * @param gsCredential credentials used to authenticate with the remote server.
 * @return {@code true} if the remote path exists, {@code false} otherwise.
 * @throws Exception if the client cannot connect or authentication fails.
 */
public boolean exists(URI destURI, GSSCredential gsCredential) throws Exception {
    GridFTPClient ftpClient = null;
    GridFTPContactInfo contactInfo = new GridFTPContactInfo(destURI.getHost(), destURI.getPort());
    try {
        String remoteFile = destURI.getPath();
        log.info("the remote file is " + remoteFile);
        log.info("Setup GridFTP Client");
        ftpClient = new GridFTPClient(contactInfo.hostName, contactInfo.port);
        ftpClient.setAuthorization(new HostAuthorization("host"));
        ftpClient.authenticate(gsCredential);
        ftpClient.setDataChannelAuthentication(DataChannelAuthentication.SELF);
        log.info("Checking whether file exists");
        // Reuse the already-extracted path instead of re-reading it from the URI.
        return ftpClient.exists(remoteFile);
    } catch (ServerException e) {
        // Fixed: the previous message claimed a download failure for an existence check.
        throw new Exception("Cannot check file existence on GridFTP:" + contactInfo.toString(), e);
    } catch (IOException e) {
        throw new Exception("Cannot check file existence on GridFTP:" + contactInfo.toString(), e);
    } finally {
        if (ftpClient != null) {
            try {
                ftpClient.close();
            } catch (Exception e) {
                log.info("Cannot close GridFTP client connection");
            }
        }
    }
}
/**
 * Downloads a remote file and returns its content as a string.
 *
 * <p>If {@code localFile} is {@code null}, a temporary file is created to hold the
 * download. Lines are joined using the platform line separator.
 *
 * @param destURI      remote file to read.
 * @param gsCredential credentials used to authenticate with the remote server.
 * @param localFile    local file to download into, or {@code null} to use a temp file.
 * @return the content of the downloaded file.
 * @throws Exception if the download fails or the local file cannot be read.
 */
public String readRemoteFile(URI destURI, GSSCredential gsCredential, File localFile) throws Exception {
    BufferedReader instream = null;
    File localTempfile = null;
    try {
        if (localFile == null) {
            localTempfile = File.createTempFile("stderr", "err");
        } else {
            localTempfile = localFile;
        }
        // Fixed typo in log message ("Loca" -> "Local").
        log.info("Local temporary file:" + localTempfile);
        downloadFile(destURI, gsCredential, localTempfile);
        instream = new BufferedReader(new FileReader(localTempfile));
        // StringBuilder: no synchronization needed for this method-local buffer.
        StringBuilder buff = new StringBuilder();
        String line;
        while ((line = instream.readLine()) != null) {
            buff.append(line);
            buff.append(System.getProperty("line.separator"));
        }
        log.info("finish read file:" + localTempfile);
        return buff.toString();
    } catch (FileNotFoundException e) {
        throw new Exception("Cannot read localfile file:" + localTempfile, e);
    } catch (IOException e) {
        throw new Exception("Cannot read localfile file:" + localTempfile, e);
    } finally {
        if (instream != null) {
            try {
                instream.close();
            } catch (Exception e) {
                // Fixed: previous message wrongly referred to the GridFTP connection.
                log.info("Cannot close local file reader");
            }
        }
    }
}
/**
 * Transfers a file directly from one GridFTP endpoint to another GridFTP endpoint
 * (third-party transfer: data flows server-to-server, not through this client).
 *
 * @param srchost   source file and host.
 * @param desthost  destination file and host.
 * @param gssCred   credentials used to authenticate to both servers.
 * @param srcActive if {@code true} the source opens the data connection to the
 *                  destination (destination is passive); otherwise the roles are
 *                  reversed.
 * @throws Exception if authentication or the transfer fails on either endpoint.
 */
public void transfer(URI srchost, URI desthost, GSSCredential gssCred, boolean srcActive) throws Exception {
    GridFTPClient destClient = null;
    GridFTPClient srcClient = null;
    try {
        // Both endpoints are authenticated with the same credential and put into
        // binary (IMAGE) mode so the bytes are transferred unmodified.
        destClient = new GridFTPClient(desthost.getHost(), desthost.getPort());
        destClient.setAuthorization(new HostAuthorization("host"));
        destClient.authenticate(gssCred);
        destClient.setType(Session.TYPE_IMAGE);
        srcClient = new GridFTPClient(srchost.getHost(), srchost.getPort());
        srcClient.setAuthorization(new HostAuthorization("host"));
        srcClient.authenticate(gssCred);
        srcClient.setType(Session.TYPE_IMAGE);
        // Exactly one side must be passive: the passive side reports its data-channel
        // host/port, and the active side connects to it.
        if (srcActive) {
            log.info("Set src active");
            HostPort hp = destClient.setPassive();
            srcClient.setActive(hp);
        } else {
            log.info("Set dst active");
            HostPort hp = srcClient.setPassive();
            destClient.setActive(hp);
        }
        log.info("Start transfer file from GridFTP:" + srchost.toString() + " to " + desthost.toString());
        /**
         * Transfer a file. The transfer() function blocks until the transfer is complete.
         */
        srcClient.transfer(srchost.getPath(), destClient, desthost.getPath(), false, null);
        // NOTE(review): despite the "CHECK SUM" wording, this only compares file sizes,
        // and a mismatch is merely logged — it does not fail the transfer.
        if (srcClient.getSize(srchost.getPath()) == destClient.getSize(desthost.getPath())) {
            log.info("CHECK SUM OK");
        } else {
            log.info("****CHECK SUM FAILED****");
        }
    } catch (ServerException e) {
        throw new Exception("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
                + desthost.toString(), e);
    } catch (IOException e) {
        throw new Exception("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
                + desthost.toString(), e);
    } catch (ClientException e) {
        throw new Exception("Cannot transfer file from GridFTP:" + srchost.toString() + " to "
                + desthost.toString(), e);
    } finally {
        // Best-effort cleanup of both control connections; failures are logged only.
        if (destClient != null) {
            try {
                destClient.close();
            } catch (Exception e) {
                log.info("Cannot close GridFTP client connection at Desitnation:" + desthost.toString());
            }
        }
        if (srcClient != null) {
            try {
                srcClient.close();
            } catch (Exception e) {
                log.info("Cannot close GridFTP client connection at Source:" + srchost.toString());
            }
        }
    }
}
/**
 * Builds a {@code gsiftp://} URI from a host (optionally already carrying the
 * scheme and/or a trailing slash) and a path.
 *
 * @param host      host name, with or without the {@code gsiftp://} prefix.
 * @param localPath path to append after the host.
 * @return the combined gsiftp URI.
 * @throws URISyntaxException if the combined string is not a valid URI.
 */
public static URI createGsiftpURI(String host, String localPath) throws URISyntaxException {
    // StringBuilder: this buffer is method-local, no synchronization needed.
    StringBuilder buf = new StringBuilder();
    if (!host.startsWith("gsiftp://")) {
        buf.append("gsiftp://");
    }
    buf.append(host);
    if (!host.endsWith("/")) {
        buf.append("/");
    }
    buf.append(localPath);
    return new URI(buf.toString());
}
}
| 9,500 |
0 | Create_ds/airavata-sandbox/grid-tools/gridftp-client/src/main/java/org/apache/airavata/filetransfer | Create_ds/airavata-sandbox/grid-tools/gridftp-client/src/main/java/org/apache/airavata/filetransfer/utils/ServiceConstants.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.filetransfer.utils;
/**
 * Property-key constants used by the GridFTP client tooling to look up
 * configuration values (MyProxy credentials, GridFTP endpoints, and paths).
 */
public class ServiceConstants {
    // MyProxy credential-server settings.
    public static final String MYPROXY_SERVER = "myproxyServer";
    public static final String MYPROXY_PORT = "myproxyPort";
    public static final String MYPROXY_LIFETIME = "myproxy_lifetime";
    public static final String MYPROXY_USERNAME = "myproxyUserName";
    public static final String MYPROXY_PASSWD = "myproxyPasswd";
    // Certificate/key file locations.
    public static final String TRUSTED_CERTS_FILE = "trustedCertsFile";
    public static final String HOSTCERTS_KEY_FILE = "hostcertsKeyFile";
    // Test-host and per-machine GridFTP endpoint keys.
    public static final String TESTINGHOST = "testing.host";
    public static final String LONESTARGRIDFTPEPR = "lonestar.gridftp.endpoint";
    public static final String RANGERGRIDFTPEPR = "ranger.gridftp.endpoint";
    public static final String TRESTLESGRIDFTPEPR = "trestles.gridftp.endpoint";
    // Source/destination server and path keys for transfer tests.
    public static final String GRIDFTPSERVERSOURCE = "gridftpserverSource";
    public static final String GRIDFTPSOURCEPATH = "gridftpSourcePath";
    public static final String GRIDFTPSERVERDEST = "gridftpserverDest";
    public static final String GRIDFTPDESTPATH = "gridftpDestPath";
    public static final String UPLOADING_FILE_PATH = "gridftpUploadingFilePath";
}
| 9,501 |
0 | Create_ds/airavata-sandbox/grid-tools/gridftp-client/src/main/java/org/apache/airavata/filetransfer | Create_ds/airavata-sandbox/grid-tools/gridftp-client/src/main/java/org/apache/airavata/filetransfer/utils/GridFTPContactInfo.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.filetransfer.utils;
/**
 * Represents a single GridFTP endpoint as a host name / port pair.
 *
 * <p>Invalid ports (non-positive or the HTTP port 80) are replaced by the
 * default GSI FTP port, 2811.
 */
public class GridFTPContactInfo {
    public static final int DEFAULT_GSI_FTP_PORT = 2811;
    public String hostName;
    public int port;

    /**
     * Creates endpoint info for the given host, normalizing the port to
     * {@link #DEFAULT_GSI_FTP_PORT} when it is non-positive or equal to 80.
     */
    public GridFTPContactInfo(String hostName, int port) {
        this.hostName = hostName;
        this.port = (port <= 0 || port == 80) ? DEFAULT_GSI_FTP_PORT : port;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof GridFTPContactInfo)) {
            return false;
        }
        GridFTPContactInfo other = (GridFTPContactInfo) obj;
        return hostName.equals(other.hostName) && port == other.port;
    }

    @Override
    public int hashCode() {
        // Host name alone is sufficient for the hash contract: equal objects
        // share the same host name and therefore the same hash code.
        return hostName.hashCode();
    }

    @Override
    public String toString() {
        return hostName + ":" + port;
    }
}
| 9,502 |
0 | Create_ds/airavata-sandbox/grid-tools/jsdl-generator/src/main/java/org/apache/airavata/gridtools | Create_ds/airavata-sandbox/grid-tools/jsdl-generator/src/main/java/org/apache/airavata/gridtools/jsdl/GenerateJSDLExample.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.gridtools.jsdl;
import java.io.StringWriter;
import org.apache.xmlbeans.ObjectFactory;
import org.ogf.schemas.jsdl.ApplicationType;
import org.ogf.schemas.jsdl.JobDefinitionDocument;
import org.ogf.schemas.jsdl.JobDefinitionType;
import org.ogf.schemas.jsdl.JobDescriptionType;
import org.ogf.schemas.jsdl.JobIdentificationType;
import org.ogf.schemas.jsdl.posix.FileNameType;
import org.ogf.schemas.jsdl.posix.POSIXApplicationType;
/**
 * Placeholder example for generating a JSDL job definition.
 *
 * <p>NOTE(review): the entire generation logic below is commented out and mixes
 * XMLBeans ({@code JobDefinitionDocument.Factory}) with JAXB-style APIs
 * ({@code ObjectFactory}, {@code Marshaller}); at least one line is incomplete
 * ("jsdlObjFact." with no method). As written, {@code main} is a no-op.
 */
public class GenerateJSDLExample {
    /**
     * Entry point; currently does nothing (see class note above).
     *
     * @param args unused command-line arguments.
     */
    public static void main(String[] args) {
        // try {
        //
        // JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
        // .newInstance();
        // JobDefinitionType jobDef = jobDefDoc.addNewJobDefinition();
        //
        // jobDef.addNewJobDescription().addNewResources() addNewJobDescription();
        //
        // ObjectFactory jsdlObjFact = new ObjectFactory();
        // JobDefinitionType jsdlJobDefType = jsdlObjFact.createJobDefinitionType();
        // JobDescriptionType jsdlJobDescType = jsdlObjFact.createJobDescriptionType();
        //
        // JobIdentificationType jsdlJobIdenType = jsdlObjFact.createJobIdentificationType();
        // jsdlJobIdenType.setJobName("Airavata Test");
        // jsdlJobIdenType.setDescription("Airavata JSDL Test Job");
        // jsdlJobDescType.setJobIdentification(jsdlJobIdenType);
        //
        // POSIXApplicationType jsdlPosixAppType = jsdlObjFact.createPOSIXApplicationType();
        // FileNameType execFileType = jsdlObjFact.
        // execFileType.setValue("/bin/date");
        // jsdlPosixAppType.setExecutable(execFileType);
        // JAXBElement<POSIXApplicationType> jsdlPosixApp = jsdlObjFact.createPOSIXApplication(jsdlPosixAppType);
        //
        // ApplicationType jsdlAppType = jsdlObjFact.createApplicationType();
        // jsdlAppType.setApplicationName("Test Date");
        // jsdlAppType.setApplicationVersion("v1.0");
        // jsdlAppType.setDescription("Testing Date");
        // jsdlAppType.getAny().add(jsdlPosixApp);
        // jsdlJobDescType.setApplication(jsdlAppType);
        //
        // jsdlJobDefType.setJobDescription(jsdlJobDescType);
        // JAXBElement<JobDefinitionType> jsdlJobDef = jsdlObjFact.createJobDefinition(jsdlJobDefType);
        //
        // //generate the required jsdl
        // JAXBContext jaxbContext = JAXBContext.newInstance(new Class[] { JobDefinitionType.class });
        // StringWriter jsdlXMLString = new StringWriter();
        // Marshaller jaxbMarshaller = jaxbContext.createMarshaller();
        // jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
        // jaxbMarshaller.marshal(jsdlJobDef, jsdlXMLString);
        // System.out.println(jsdlXMLString.toString());
        //
        // } catch (JAXBException e) {
        // e.printStackTrace();
        // }
    }
}
| 9,503 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/test/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/test/java/org/apache/airavata/jobsubmission/gram/StampedeGramTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
import org.apache.airavata.jobsubmission.gram.notifier.GramJobLogger;
import org.junit.Ignore;
/**
 * GRAM job-submission tests against the Stampede host.
 *
 * <p>User: AmilaJ (amilaj@apache.org)
 * Date: 6/19/13
 * Time: 3:56 PM
 *
 * <p>NOTE(review): the {@code @Ignore} below is commented out, so these tests DO
 * run; the {@code org.junit.Ignore} import is unused while it stays commented.
 * These tests require a reachable GRAM endpoint and valid MyProxy credentials
 * (see the base class).
 */
//@Ignore
public class StampedeGramTest extends GramJobSubmissionManagerTest {
    // ====================== Stampede ==============================//
    // Execution context targeting the "stampede" host, built in setUp().
    private ExecutionContext executionContext;
    public void setUp() throws Exception {
        super.setUp();
        executionContext = getDefaultExecutionContext();
        executionContext.setHost("stampede");
        executionContext.addGramJobNotifier(new GramJobLogger());
    }
    // Submits a job interactively and waits for successful completion.
    public void testExecuteJobStampedeInteractive() throws Exception {
        executionContext.setInteractive(true);
        executeJob(executionContext);
    }
    // Submits a job, then restarts monitoring from persisted state.
    public void testMonitoringRunningJobsStampede() throws Exception {
        executionContext.setInteractive(true);
        monitoringRunningJobs(executionContext);
    }
    // Submits a job and cancels it mid-flight.
    public void testCancelJobsStampede() throws Exception {
        executionContext.setInteractive(true);
        cancelJob(executionContext);
    }
}
| 9,504 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/test/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/test/java/org/apache/airavata/jobsubmission/gram/GramJobSubmissionManagerTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.airavata.common.utils.DBUtil;
import org.apache.airavata.common.utils.DerbyUtil;
import org.apache.airavata.jobsubmission.gram.notifier.GramJobLogger;
import org.apache.airavata.jobsubmission.gram.persistence.DBJobPersistenceManager;
import org.apache.airavata.jobsubmission.gram.persistence.JobData;
import org.apache.airavata.jobsubmission.gram.persistence.JobPersistenceManager;
import org.apache.airavata.security.myproxy.SecurityContext;
import org.apache.log4j.Logger;
import org.globus.gsi.provider.GlobusProvider;
import java.security.Security;
import java.util.List;
/**
 * Base test harness for GRAM job submission.
 *
 * <p>Starts an embedded Derby server (for job persistence) and the listener
 * queue in {@link #setUp()}. Requires MyProxy credentials passed as the JVM
 * system properties {@code myproxy.user} and {@code myproxy.password};
 * otherwise setup fails fast. Subclasses target specific hosts.
 *
 * <p>User: AmilaJ (amilaj@apache.org)
 * Date: 6/12/13
 * Time: 10:24 AM
 */
public class GramJobSubmissionManagerTest extends TestCase {
    static {
        // Register the Globus security provider once per JVM.
        Security.addProvider(new GlobusProvider());
    }
    private static final Logger logger = Logger.getLogger(GramJobSubmissionManagerTest.class);
    // Connection settings for the embedded Derby server started in setUp().
    protected static String hostAddress = "localhost";
    protected static int port = 20000;
    protected static String userName = "admin";
    protected static String password = "admin";
    protected static String driver = "org.apache.derby.jdbc.ClientDriver";
    // MyProxy credentials supplied via -Dmyproxy.user / -Dmyproxy.password.
    private String myProxyUserName = System.getProperty("myproxy.user");
    private String myProxyPassword = System.getProperty("myproxy.password");
    public void setUp() throws Exception{
        // Fail fast with a helpful message when credentials are missing.
        if (myProxyUserName == null || myProxyPassword == null || myProxyUserName.trim().equals("") ||
                myProxyPassword.trim().equals("")) {
            logger.error("myproxy.user and myproxy.password system properties are not set. Example :- " +
                    "> mvn clean install -Dmyproxy.user=u1 -Dmyproxy.password=xxx");
            Assert.fail("Please set myproxy.user and myproxy.password system properties.");
        }
        DerbyUtil.startDerbyInServerMode(getHostAddress(), getPort(), getUserName(), getPassword());
        // Recreate the gram_job table from scratch for each test run.
        String createTable = "CREATE TABLE gram_job\n" +
                "(\n" +
                "        job_id VARCHAR(256) NOT NULL,\n" +
                "        status int NOT NULL,\n" +
                "        PRIMARY KEY (job_id)\n" +
                ")";
        String dropTable = "drop table gram_job";
        try {
            executeSQL(dropTable);
        } catch (Exception e) {}  // deliberately ignored: the table may not exist yet
        executeSQL(createTable);
        ListenerQueue listenerQueue = ListenerQueue.getInstance();
        listenerQueue.startListenerQueue();
    }
    public void tearDown() {
        ListenerQueue listenerQueue = ListenerQueue.getInstance();
        listenerQueue.stopListenerQueue();
    }
    // Runs a single SQL statement against the embedded Derby database.
    public static void executeSQL(String sql) throws Exception {
        DBUtil dbUtil = new DBUtil(getJDBCUrl(), getUserName(), getPassword(), getDriver());
        dbUtil.executeSQL(sql);
    }
    // JDBC URL for the Derby server; creates the database on first use.
    public static String getJDBCUrl() {
        return "jdbc:derby://" + getHostAddress() + ":" + getPort() + "/persistent_data;create=true;user=" + getUserName() + ";password=" + getPassword();
    }
    public static String getHostAddress() {
        return hostAddress;
    }
    public static int getPort() {
        return port;
    }
    public static String getUserName() {
        return userName;
    }
    public static String getPassword() {
        return password;
    }
    public static String getDriver() {
        return driver;
    }
    public DBUtil getDbUtil () throws Exception {
        return new DBUtil(getJDBCUrl(), getUserName(), getPassword(), getDriver());
    }
    // Dummy test case just avoid failures
    public void testDummy() {}
    /**
     * Submits a job via GRAM, waits 2 minutes for it to complete, then asserts
     * exactly that job is recorded as successful in the persistence store.
     * NOTE(review): assumes the GRAM endpoint is reachable and the job finishes
     * within the fixed 2-minute wait — confirm for slow queues.
     */
    public void executeJob(ExecutionContext executionContext) throws Exception {
        SecurityContext context = new SecurityContext(myProxyUserName, myProxyPassword);
        context.login();
        JobPersistenceManager jobPersistenceManager
                = new DBJobPersistenceManager(getDbUtil());
        GramJobSubmissionManager gramJobSubmissionManager
                = new GramJobSubmissionManager(jobPersistenceManager);
        String jobId = gramJobSubmissionManager.executeJob(context.getRawCredential(),
                executionContext.getGRAMEndPoint(),
                executionContext);
        Assert.assertNotNull(jobId);
        Thread.sleep(2 * 60 * 1000);
        logger.info("Checking whether job is in successful state in the persistence store");
        List<JobData> list = jobPersistenceManager.getSuccessfullyCompletedJobs();
        Assert.assertEquals(1, list.size());
        Assert.assertEquals(jobId, list.get(0).getJobId());
    }
    /**
     * Submits a job, stops the listener queue mid-run, then restarts it and
     * resumes monitoring of persisted running jobs.
     */
    public void monitoringRunningJobs(ExecutionContext executionContext) throws Exception {
        SecurityContext context = new SecurityContext(myProxyUserName, myProxyPassword);
        context.login();
        JobPersistenceManager jobPersistenceManager
                = new DBJobPersistenceManager(getDbUtil());
        GramJobSubmissionManager gramJobSubmissionManager
                = new GramJobSubmissionManager(jobPersistenceManager);
        executionContext.addGramJobNotifier(new GramJobLogger());
        String jobId = gramJobSubmissionManager.executeJob(context.getRawCredential(),
                executionContext.getGRAMEndPoint(),
                executionContext);
        Thread.sleep(3000);
        ListenerQueue listenerQueue = ListenerQueue.getInstance();
        listenerQueue.stopListenerQueue();
        logger.info("=================== Process Finished - Monitoring Stopped ==========================");
        Assert.assertNotNull(jobId);
        listenerQueue = ListenerQueue.getInstance();
        listenerQueue.startListenerQueue();
        logger.info("=================== Monitoring Stored Jobs ==========================");
        gramJobSubmissionManager.startMonitoringRunningJobs(context.getRawCredential(), executionContext);
        Thread.sleep(1 * 60 * 1000);
    }
    /**
     * Submits a job, waits 30 seconds, then cancels it via GRAM.
     */
    public void cancelJob(ExecutionContext executionContext) throws Exception {
        SecurityContext context = new SecurityContext(myProxyUserName, myProxyPassword);
        context.login();
        JobPersistenceManager jobPersistenceManager
                = new DBJobPersistenceManager(getDbUtil());
        GramJobSubmissionManager gramJobSubmissionManager
                = new GramJobSubmissionManager(jobPersistenceManager);
        executionContext.addGramJobNotifier(new GramJobLogger());
        String jobId = gramJobSubmissionManager.executeJob(context.getRawCredential(),
                executionContext.getGRAMEndPoint(),
                executionContext);
        Thread.sleep(30 * 1000);
        Assert.assertNotNull(jobId);
        gramJobSubmissionManager.cancelJob(jobId, context.getRawCredential());
        logger.info("========== End of test case ==============");
        Thread.sleep(1 * 30 * 1000);
    }
    public static ExecutionContext getDefaultExecutionContext() throws Exception {
        return new ExecutionContext();
    }
}
| 9,505 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/test/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/test/java/org/apache/airavata/jobsubmission/gram/LoneStarGramTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
import org.apache.airavata.jobsubmission.gram.notifier.GramJobLogger;
import org.junit.Ignore;
/**
 * GRAM job-submission tests against the Lonestar host.
 *
 * <p>User: AmilaJ (amilaj@apache.org)
 * Date: 6/19/13
 * Time: 3:56 PM
 */
@Ignore("Lonestar is taking too much time to run tests. Not sure why.")
public class LoneStarGramTest extends GramJobSubmissionManagerTest {
    // Execution context targeting the "lonestar" host, built in setUp().
    private ExecutionContext executionContext;
    public void setUp() throws Exception {
        super.setUp();
        executionContext = getDefaultExecutionContext();
        executionContext.setHost("lonestar");
        executionContext.addGramJobNotifier(new GramJobLogger());
    }
    // Submits a job interactively and waits for successful completion.
    public void testExecuteJobLoneStarInteractive() throws Exception {
        executionContext.setInteractive(true);
        executeJob(executionContext);
    }
    // Submits a job, then restarts monitoring from persisted state.
    public void testMonitoringRunningJobsLoneStar() throws Exception {
        executionContext.setInteractive(true);
        monitoringRunningJobs(executionContext);
    }
    // Submits a job and cancels it mid-flight.
    public void testCancelJobsLoneStar() throws Exception {
        executionContext.setInteractive(true);
        cancelJob(executionContext);
    }
}
| 9,506 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/test/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/test/java/org/apache/airavata/jobsubmission/gram/TrestlesGramTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
import org.apache.airavata.jobsubmission.gram.notifier.GramJobLogger;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/19/13
* Time: 3:54 PM
*/
public class TrestlesGramTest extends GramJobSubmissionManagerTest {
// ====================== Trestles ==============================//
private ExecutionContext executionContext;
public void setUp() throws Exception {
super.setUp();
executionContext = getDefaultExecutionContext();
executionContext.setHost("trestles");
executionContext.addGramJobNotifier(new GramJobLogger());
}
public void testExecuteJobTrestlesInteractive() throws Exception {
executionContext.setInteractive(true);
executeJob(executionContext);
}
public void testMonitoringRunningJobsTrestles() throws Exception {
executionContext.setInteractive(true);
monitoringRunningJobs(executionContext);
}
public void testCancelJobsTrestles() throws Exception {
executionContext.setInteractive(true);
cancelJob(executionContext);
}
}
| 9,507 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/RSLGenerator.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/18/13
* Time: 10:24 AM
*/
import org.apache.log4j.Logger;
import org.globus.gram.GramAttributes;
/**
* Responsible for generating the RSL from descriptors. Based on execution contexts.
*/
public class RSLGenerator {

    private static final String MULTIPLE = "multiple";
    private static final String MPI = "mpi";
    private static final String SINGLE = "single";
    private static final String CONDOR = "CONDOR";

    private static final Logger log = Logger.getLogger(RSLGenerator.class);

    /**
     * Translates the values held in the execution context into a GRAM
     * attribute set (the RSL). Optional settings are applied only when the
     * corresponding context value is present.
     *
     * @param appExecContext job description to translate
     * @return the populated GRAM attribute set
     * @throws Exception if the attributes cannot be assembled
     */
    protected GramAttributes configureRemoteJob(ExecutionContext appExecContext) throws Exception {
        GramAttributes attributes = new GramAttributes();
        attributes.setExecutable(appExecContext.getExecutable());

        if (appExecContext.getWorkingDir() != null) {
            attributes.setDirectory(appExecContext.getWorkingDir());
            attributes.setStdout(appExecContext.getStdOut());
            attributes.setStderr(appExecContext.getStderr());
        }

        Integer wallTime = appExecContext.getMaxWallTime();
        if (wallTime != null && wallTime > 0) {
            log.info("Setting max wall clock time to " + wallTime);
            attributes.setMaxWallTime(wallTime);
            attributes.set("proxy_timeout", "1");
        }

        Integer processCount = appExecContext.getPcount();
        if (processCount != null && processCount >= 1) {
            log.info("Setting number of procs to " + processCount);
            attributes.setNumProcs(processCount);
        }

        Integer hostCount = appExecContext.getHostCount();
        if (hostCount != null && hostCount >= 1) {
            attributes.set("hostCount", String.valueOf(hostCount));
        }

        if (appExecContext.getProjectName() != null) {
            log.info("Setting project to " + appExecContext.getProjectName());
            attributes.setProject(appExecContext.getProjectName());
        }

        if (appExecContext.getQueue() != null) {
            attributes.setQueue(appExecContext.getQueue());
        }

        if (appExecContext.getArguments() != null) {
            attributes.set("arguments", appExecContext.getArguments());
        }

        applyJobType(attributes, appExecContext.getJobType());
        return attributes;
    }

    /**
     * Maps the textual job type onto the matching GRAM job-type constant;
     * a missing type defaults to a single job. Unrecognised type strings
     * leave the attribute untouched, as before.
     */
    private void applyJobType(GramAttributes attributes, String requestedType) {
        String jobType = (requestedType != null) ? requestedType : SINGLE;
        if (SINGLE.equals(jobType)) {
            log.info("Setting job type to single");
            attributes.setJobType(GramAttributes.JOBTYPE_SINGLE);
        } else if (MPI.equals(jobType)) {
            log.info("Setting job type to mpi");
            attributes.setJobType(GramAttributes.JOBTYPE_MPI);
        } else if (MULTIPLE.equals(jobType)) {
            log.info("Setting job type to multiple");
            attributes.setJobType(GramAttributes.JOBTYPE_MULTIPLE);
        } else if (CONDOR.equals(jobType)) {
            log.info("Setting job type to condor");
            attributes.setJobType(GramAttributes.JOBTYPE_CONDOR);
        }
    }
}
| 9,508 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/GramJobSubmissionManager.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
import org.apache.airavata.jobsubmission.gram.persistence.JobData;
import org.apache.airavata.jobsubmission.gram.persistence.JobPersistenceManager;
import org.apache.airavata.jobsubmission.gram.persistence.PersistenceGramJobNotifier;
import org.apache.log4j.Logger;
import org.globus.gram.GramAttributes;
import org.globus.gram.GramException;
import org.globus.gram.GramJob;
import org.globus.gram.WaitingForCommitException;
import org.globus.gram.internal.GRAMConstants;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSException;
import java.net.MalformedURLException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Submits jobs to a GRAM gatekeeper using the two-phase commit protocol and
 * keeps a cache of the jobs started in this JVM so they can be cancelled
 * later. State transitions are also persisted through the supplied
 * {@link JobPersistenceManager} via a {@link PersistenceGramJobNotifier}.
 */
public class GramJobSubmissionManager {

    private static final Logger log = Logger.getLogger(GramJobSubmissionManager.class);

    // Jobs submitted through this manager, keyed by GRAM job id. cancelJob
    // reuses the live GramJob object from here instead of re-binding by id.
    private static final Map<String, GramJob> currentlyExecutingJobCache = new ConcurrentHashMap<String, GramJob>();

    // Translates an ExecutionContext into GRAM RSL attributes.
    private RSLGenerator rslGenerator;

    // Store used to record job ids and their state transitions.
    private JobPersistenceManager jobPersistenceManager;

    /**
     * @param jobPersistenceManager store used to persist job ids and states
     */
    public GramJobSubmissionManager(JobPersistenceManager jobPersistenceManager) {
        this.jobPersistenceManager = jobPersistenceManager;
        this.rslGenerator = new RSLGenerator();
    }

    /**
     * Submits the job described by the execution context to the gatekeeper at
     * {@code contactString} using GRAM two-phase commit.
     *
     * @param gssCred        proxy credential used to authenticate the request
     * @param contactString  GRAM gatekeeper contact string
     * @param appExecContext job description plus registered notifiers
     * @return the GRAM job id, or {@code null} when submission failed —
     *         GRAM/GSS failures are caught and logged rather than rethrown,
     *         so callers must check for null
     * @throws Exception declared but in practice only reachable for errors
     *         thrown outside the catch blocks below
     */
    public String executeJob(GSSCredential gssCred, String contactString,
                             ExecutionContext appExecContext) throws Exception {
        try {
            //TODO remove when porting
            log.setLevel(org.apache.log4j.Level.ALL);
            // Mirror every state change into the persistence store.
            appExecContext.addGramJobNotifier(new PersistenceGramJobNotifier(this.jobPersistenceManager));
            GramAttributes jobAttr = rslGenerator.configureRemoteJob(appExecContext);
            String rsl = jobAttr.toRSL();
            // twoPhase=yes makes the gatekeeper answer with
            // WaitingForCommitException so the id can be recorded pre-commit.
            GramJob job = new GramJob(rsl + "(twoPhase=yes)");
            log.info("RSL = " + rsl);
            JobSubmissionListener listener = new JobSubmissionListener(appExecContext.getGramJobNotifierList());
            job.setCredentials(gssCred);
            job.addListener(listener);
            log.info("Request to contact:" + contactString);
            try {
                job.request(true, contactString, appExecContext.isInteractive());
            } catch(WaitingForCommitException e) {
                // Expected path for two-phase commit: the job id is known now.
                log.info("JobID = " + job.getIDAsString());
                jobPersistenceManager.updateJobStatus(new JobData(job.getIDAsString(),
                        GRAMConstants.STATUS_UNSUBMITTED));
                ListenerQueue listenerQueue = ListenerQueue.getInstance();
                listenerQueue.addJob(job);
                currentlyExecutingJobCache.put(job.getIDAsString(), job);
                log.debug("Two phase commit: sending COMMIT_REQUEST signal");
                job.signal(GramJob.SIGNAL_COMMIT_REQUEST);
            }
            return job.getIDAsString();
        } catch (GramException ge) {
            // NOTE(review): failures are swallowed and null is returned;
            // consider rethrowing a domain exception instead.
            ge.printStackTrace();
            log.error(ge, ge.getCause());
        } catch (GSSException gss) {
            gss.printStackTrace();
            log.error(gss, gss.getCause());
        } catch (Exception e) {
            e.printStackTrace();
            log.error(e, e.getCause());
        }
        return null;
    }

    /**
     * Cancels a job and completes the two-phase protocol with COMMIT_END.
     * If the job was submitted by this manager, the cached GramJob is reused;
     * otherwise a fresh GramJob is bound to the id using {@code gssCred}.
     *
     * @param jobId   GRAM job id to cancel
     * @param gssCred credential used when the job is not in the local cache
     */
    public void cancelJob(String jobId, GSSCredential gssCred) throws GramException, GSSException,
            MalformedURLException {
        if (currentlyExecutingJobCache.containsKey(jobId)) {
            GramJob gramJob = currentlyExecutingJobCache.get(jobId);
            if (gramJob != null) {
                gramJob.cancel();
                gramJob.signal(GramJob.SIGNAL_COMMIT_END);
            }
        } else {
            // null RSL: the job already exists remotely, only the id matters.
            GramJob gramJob = new GramJob(null);
            gramJob.setID(jobId);
            gramJob.setCredentials(gssCred);
            gramJob.cancel();
            gramJob.signal(GramJob.SIGNAL_COMMIT_END);
        }
    }

    /**
     * Re-attaches monitoring to every job the persistence store reports as
     * running — e.g. after a restart — by queueing each for re-binding.
     *
     * @param gssCred        credential used to bind to the existing jobs
     * @param appExecContext supplies the notifiers to fan state changes to
     */
    public void startMonitoringRunningJobs(GSSCredential gssCred, ExecutionContext appExecContext) throws GFacException, MalformedURLException {
        ListenerQueue listenerQueue = ListenerQueue.getInstance();
        List<JobData> jobDataList = this.jobPersistenceManager.getRunningJobs();
        appExecContext.addGramJobNotifier(new PersistenceGramJobNotifier(this.jobPersistenceManager));
        JobSubmissionListener listener = new JobSubmissionListener(appExecContext.getGramJobNotifierList());
        for (JobData jobData : jobDataList) {
            GramJob gramJob = new GramJob(null);
            gramJob.setID(jobData.getJobId());
            gramJob.setCredentials(gssCred);
            gramJob.addListener(listener);
            log.info("Adding job " + jobData.getJobId() + " in state " + jobData.getState()
                    + " to monitoring queue");
            listenerQueue.addJob(gramJob);
        }
    }
}
| 9,509 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/ExecutionContext.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.apache.airavata.jobsubmission.gram.notifier.GramJobNotifier;
import org.apache.airavata.jobsubmission.utils.ServiceConstants;
/**
 * Describes everything needed to run one GRAM job: the gatekeeper endpoints
 * for the supported hosts, the executable and its scheduler settings, and the
 * notifiers that should receive job state changes. On construction, defaults
 * are read from {@value #PROPERTY_FILE} when it is present on the classpath.
 */
public class ExecutionContext {

    private String testingHost;
    private String lonestarGRAM;
    private String stampedeGRAM;
    private String trestlesGRAM;
    private String workingDir;
    private String tmpDir;
    private String stdOut;
    private String stderr;
    private String host;
    private String executable;
    private ArrayList<String[]> env;
    private String inputDataDir;
    private String outputDataDir;
    private boolean parameterNamesNeeded = false;
    private String stdIn;
    private String stdoutStr;
    private String stderrStr;
    private String queue;
    private Integer maxWallTime;
    private Integer pcount;
    private String projectName;
    private Integer minMemory;
    private Integer hostCount;
    private String jobType;
    private String arguments;
    private boolean interactive;
    private String userName;
    private String password;
    private List<GramJobNotifier> gramJobNotifierList = new ArrayList<GramJobNotifier>();

    /** Classpath resource holding the default configuration values. */
    public static final String PROPERTY_FILE = "airavata-myproxy-client.properties";

    /**
     * Creates a context populated from {@link #PROPERTY_FILE} when the file
     * is available on the classpath; otherwise all fields keep their
     * defaults.
     *
     * @throws IOException if the property file exists but cannot be read
     */
    public ExecutionContext() throws IOException {
        loadConfiguration();
    }

    /**
     * Reads the optional property file and copies every property that is
     * present into the corresponding field; absent properties leave the
     * field at its default value.
     */
    private void loadConfiguration() throws IOException {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        InputStream propertyStream = classLoader.getResourceAsStream(PROPERTY_FILE);
        if (propertyStream == null) {
            return; // no configuration file on the classpath - keep defaults
        }
        Properties properties = new Properties();
        try {
            properties.load(propertyStream);
        } finally {
            // Fix: the stream was previously never closed (resource leak).
            propertyStream.close();
        }
        String testinghost = properties.getProperty(ServiceConstants.TESTINGHOST);
        String lonestargram = properties.getProperty(ServiceConstants.LONESTARGRAMEPR);
        String stampedeGram = properties.getProperty(ServiceConstants.STAMPEDE_GRAM_EPR);
        String trestlesgram = properties.getProperty(ServiceConstants.TRESTLESGRAMEPR);
        String exec = properties.getProperty(ServiceConstants.EXECUTABLE);
        String args = properties.getProperty(ServiceConstants.ARGUMENTS);
        String queueName = properties.getProperty(ServiceConstants.QUEUE);
        String pn = properties.getProperty(ServiceConstants.PROJECT_NUMBER);
        String jt = properties.getProperty(ServiceConstants.JOB_TYPE);
        String mwt = properties.getProperty(ServiceConstants.MAX_WALL_TIME);
        String pc = properties.getProperty(ServiceConstants.PCOUNT);
        String hc = properties.getProperty(ServiceConstants.HOSTCOUNT);
        if (testinghost != null) {
            this.testingHost = testinghost;
        }
        if (lonestargram != null) {
            this.lonestarGRAM = lonestargram;
        }
        if (stampedeGram != null) {
            this.stampedeGRAM = stampedeGram;
        }
        if (trestlesgram != null) {
            this.trestlesGRAM = trestlesgram;
        }
        if (exec != null) {
            this.executable = exec;
        }
        if (args != null) {
            this.arguments = args;
        }
        if (queueName != null) {
            this.queue = queueName;
        }
        if (pn != null) {
            this.projectName = pn;
        }
        if (jt != null) {
            this.jobType = jt;
        }
        if (mwt != null) {
            this.maxWallTime = parseIntOrDefault(mwt, 1);
        }
        if (pc != null) {
            this.pcount = parseIntOrDefault(pc, 1);
        }
        if (hc != null) {
            this.hostCount = parseIntOrDefault(hc, 1);
        }
    }

    /**
     * Parses {@code value} as an integer, returning {@code fallback} when the
     * text is not a valid number (mirrors the original lenient behaviour).
     */
    private static Integer parseIntOrDefault(String value, int fallback) {
        try {
            return Integer.parseInt(value);
        } catch (NumberFormatException e) {
            return fallback;
        }
    }

    public String getTestingHost() { return testingHost; }

    public void setTestingHost(String testingHost) { this.testingHost = testingHost; }

    public String getLonestarGRAM() { return lonestarGRAM; }

    public void setLonestarGRAM(String lonestarGRAM) { this.lonestarGRAM = lonestarGRAM; }

    public String getStampedeGRAM() { return stampedeGRAM; }

    public void setStampedeGRAM(String stampedeGRAM) { this.stampedeGRAM = stampedeGRAM; }

    public String getTrestlesGRAM() { return trestlesGRAM; }

    /**
     * Resolves the GRAM endpoint for the currently configured host.
     *
     * @return the endpoint configured for "trestles", "stampede" or "lonestar"
     * @throws RuntimeException if the host is not one of the known names
     */
    public String getGRAMEndPoint() {
        if (this.getHost().equals("trestles")) {
            return this.getTrestlesGRAM();
        } else if (this.getHost().equals("stampede")) {
            return this.getStampedeGRAM();
        } else if (this.getHost().equals("lonestar")) {
            return this.getLonestarGRAM();
        } else {
            throw new RuntimeException("Invalid host " + this.getHost() );
        }
    }

    public void setTrestlesGRAM(String trestlesGRAM) { this.trestlesGRAM = trestlesGRAM; }

    /** @return the remote working directory for the job */
    public String getWorkingDir() { return workingDir; }

    public void setWorkingDir(String workingDir) { this.workingDir = workingDir; }

    public String getTmpDir() { return tmpDir; }

    public void setTmpDir(String tmpDir) { this.tmpDir = tmpDir; }

    /** @return path where the job's standard output is written */
    public String getStdOut() { return stdOut; }

    public void setStdOut(String stdOut) { this.stdOut = stdOut; }

    /** @return path where the job's standard error is written */
    public String getStderr() { return stderr; }

    public void setStderr(String stderr) { this.stderr = stderr; }

    /** @return logical host name used by {@link #getGRAMEndPoint()} */
    public String getHost() { return host; }

    public void setHost(String host) { this.host = host; }

    public String getExecutable() { return executable; }

    public void setExecutable(String executable) { this.executable = executable; }

    public ArrayList<String[]> getEnv() { return env; }

    public void setEnv(ArrayList<String[]> env) { this.env = env; }

    public String getInputDataDir() { return inputDataDir; }

    public void setInputDataDir(String inputDataDir) { this.inputDataDir = inputDataDir; }

    public String getOutputDataDir() { return outputDataDir; }

    public void setOutputDataDir(String outputDataDir) { this.outputDataDir = outputDataDir; }

    public boolean isParameterNamesNeeded() { return parameterNamesNeeded; }

    public void setParameterNamesNeeded(boolean parameterNamesNeeded) {
        this.parameterNamesNeeded = parameterNamesNeeded;
    }

    public String getStdIn() { return stdIn; }

    public void setStdIn(String stdIn) { this.stdIn = stdIn; }

    public String getStdoutStr() { return stdoutStr; }

    public void setStdoutStr(String stdoutStr) { this.stdoutStr = stdoutStr; }

    public String getStderrStr() { return stderrStr; }

    public void setStderrStr(String stderrStr) { this.stderrStr = stderrStr; }

    public void setQueue(String queue) { this.queue = queue; }

    /** @return the scheduler queue name, or null when unset */
    public String getQueue() { return queue; }

    public void setMaxWallTime(Integer maxWallTime) { this.maxWallTime = maxWallTime; }

    /** @return maximum wall-clock time, or null when unset */
    public Integer getMaxWallTime() { return maxWallTime; }

    /** @return requested process count, or null when unset */
    public Integer getPcount() { return pcount; }

    public void setPcount(Integer pcount) { this.pcount = pcount; }

    public String getProjectName() { return projectName; }

    public void setProjectName(String projectName) { this.projectName = projectName; }

    public Integer getMinMemory() { return minMemory; }

    public void setMinMemory(Integer minMemory) { this.minMemory = minMemory; }

    /** @return requested host count, or null when unset */
    public Integer getHostCount() { return hostCount; }

    public void setHostCount(Integer hostCount) { this.hostCount = hostCount; }

    public void setJobType(String jobType) { this.jobType = jobType; }

    /** @return job type string ("single", "mpi", ...), or null when unset */
    public String getJobType() { return jobType; }

    public void setArguments(String arguments) { this.arguments = arguments; }

    public String getArguments() { return arguments; }

    public boolean isInteractive() { return interactive; }

    public void setInteractive(boolean interactive) { this.interactive = interactive; }

    /** Registers a notifier to receive job state changes. */
    public void addGramJobNotifier(GramJobNotifier gramJobNotifier) {
        this.gramJobNotifierList.add(gramJobNotifier);
    }

    /** @return read-only view of the registered notifiers */
    public List<GramJobNotifier> getGramJobNotifierList() {
        return Collections.unmodifiableList(gramJobNotifierList);
    }

    public String getUserName() { return userName; }

    public void setUserName(String userName) { this.userName = userName; }

    public String getPassword() { return password; }

    public void setPassword(String password) { this.password = password; }
}
| 9,510 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/ListenerQueue.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
import org.globus.gram.GramException;
import org.globus.gram.GramJob;
import org.globus.util.deactivator.Deactivator;
import org.ietf.jgss.GSSException;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/17/13
* Time: 2:15 PM
*/
/**
 * Singleton worker thread that binds queued {@link GramJob} instances to
 * their status listeners. Producers enqueue jobs via {@link #addJob}; this
 * thread drains the queue and sleeps while it is empty.
 */
public class ListenerQueue extends Thread {

    private final Queue<JobListenerThread> qe;

    private volatile static ListenerQueue listenerQueue;

    private volatile boolean shutDown = false;
    private volatile boolean isWaiting = false;

    private ListenerQueue() {
        qe = new ConcurrentLinkedQueue<JobListenerThread>();
    }

    /**
     * Returns the shared instance, creating it on first use.
     * Fix: the method is now synchronized — the previous unsynchronized
     * check-then-act could create two instances under concurrent access.
     */
    public static synchronized ListenerQueue getInstance() {
        if (listenerQueue == null) {
            listenerQueue = new ListenerQueue();
        }
        return listenerQueue;
    }

    /**
     * Main loop: drain the queue, then block until new work arrives or
     * shutdown is requested.
     */
    public void run() {
        while (!shutDown) {
            consume();
            synchronized (qe) {
                // Fix: re-check emptiness while holding the monitor so a job
                // added between consume() and wait() is not missed (lost
                // wakeup), and loop to tolerate spurious wakeups.
                while (qe.isEmpty() && !shutDown) {
                    isWaiting = true;
                    try {
                        qe.wait();
                    } catch (InterruptedException e) {
                        // Interrupts are only used to nudge the loop awake;
                        // fall through and drain the queue again.
                        break;
                    } finally {
                        isWaiting = false;
                    }
                }
            }
        }
    }

    /**
     * Requests shutdown, wakes the worker so it can observe the flag, drops
     * the singleton and deactivates the Globus callback machinery.
     */
    public void stopListenerQueue() {
        shutDown = true;
        synchronized (qe) {
            // notifyAll with no waiter is a harmless no-op.
            qe.notifyAll();
        }
        synchronized (ListenerQueue.class) {
            listenerQueue = null;
        }
        Deactivator.deactivateAll();
    }

    /** Clears the shutdown flag and starts the worker thread. */
    public void startListenerQueue() {
        shutDown = false;
        this.start();
    }

    /** Drains the queue, starting one binder thread per queued job. */
    public void consume() {
        while (!qe.isEmpty()) {
            JobListenerThread jobListenerThread = qe.poll();
            if (jobListenerThread != null) {
                jobListenerThread.start();
            }
        }
    }

    /** Enqueues a job for binding and wakes the worker thread. */
    public synchronized void addJob(GramJob job) {
        qe.offer(new JobListenerThread(job));
        synchronized (qe) {
            qe.notifyAll();
        }
    }

    /** Binds a single job to its callback handler on a dedicated thread. */
    class JobListenerThread extends Thread {

        private GramJob listeningJob;

        public JobListenerThread(GramJob job) {
            listeningJob = job;
        }

        public void run() {
            try {
                listeningJob.bind();
            } catch (GramException e) {
                e.printStackTrace();
            } catch (GSSException e) {
                e.printStackTrace();
            }
        }
    }
}
| 9,511 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/GFacException.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/18/13
* Time: 4:21 PM
*/
/**
 * Checked exception used to signal GFac (job submission) failures.
 */
public class GFacException extends Exception {

    private static final long serialVersionUID = 1L;

    /**
     * @param message description of the failure
     */
    public GFacException(String message) {
        super(message);
    }

    /**
     * Wraps an underlying exception, preserving it as the cause.
     *
     * @param exception the root cause
     */
    public GFacException(Exception exception) {
        super(exception);
    }

    /**
     * Creates an exception carrying both a descriptive message and the root
     * cause, so callers no longer have to choose between the two.
     *
     * @param message description of the failure
     * @param cause   the root cause
     */
    public GFacException(String message, Throwable cause) {
        super(message, cause);
    }
}
| 9,512 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/JobSubmissionListener.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram;
import org.apache.airavata.jobsubmission.gram.notifier.GramJobNotifier;
import org.apache.log4j.Logger;
import org.globus.gram.GramException;
import org.globus.gram.GramJob;
import org.globus.gram.GramJobListener;
import org.globus.gram.internal.GRAMProtocolErrorConstants;
import org.ietf.jgss.GSSException;
import java.util.List;
/**
 * GRAM status listener that fans job state changes out to a list of
 * {@link GramJobNotifier}s. Terminal states (FAILED / DONE) additionally
 * unbind the job from the callback handler.
 */
public class JobSubmissionListener implements GramJobListener {

    // Sentinel meaning "no status observed yet" - outside the GRAM range.
    private int currentStatus = -43;

    private static final Logger log = Logger.getLogger(JobSubmissionListener.class);

    // Notifiers invoked on every observed state change.
    private List<GramJobNotifier> gramJobNotifierList;

    /**
     * @param notifiers receivers of job state change callbacks
     */
    public JobSubmissionListener(List<GramJobNotifier> notifiers) {
        this.gramJobNotifierList = notifiers;
    }

    /**
     * GRAM callback: dispatches the job's new state to the notifiers.
     * Repeated callbacks for an unchanged state are ignored. Errors thrown
     * while unbinding or notifying are routed to OnListenerError.
     */
    public synchronized void statusChanged(GramJob job) {
        log.debug("Listener: statusChanged triggered");
        int jobStatus = job.getStatus();
        if (currentStatus != jobStatus) {
            currentStatus = jobStatus;
            if (currentStatus == GramJob.STATUS_FAILED) {
                int error = job.getError();
                log.debug("Job Error Code: " + error);
                try {
                    // Terminal state: stop receiving callbacks for this job.
                    job.unbind();
                    if (error == GRAMProtocolErrorConstants.USER_CANCELLED) {
                        for(GramJobNotifier notifier : gramJobNotifierList) {
                            notifier.OnCancel(job);
                        }
                    } else if (error == GRAMProtocolErrorConstants.ERROR_AUTHORIZATION) {
                        for(GramJobNotifier notifier : gramJobNotifierList) {
                            notifier.OnAuthorisationDenied(job);
                        }
                    } else {
                        for(GramJobNotifier notifier : gramJobNotifierList) {
                            notifier.OnError(job);
                        }
                    }
                } catch (Exception e) {
                    for(GramJobNotifier notifier : gramJobNotifierList) {
                        notifier.OnListenerError(job, e);
                    }
                }
            } else if (currentStatus == GramJob.STATUS_DONE) {
                try {
                    // Terminal state: stop receiving callbacks for this job.
                    job.unbind();
                    for(GramJobNotifier notifier : gramJobNotifierList) {
                        notifier.OnCompletion(job);
                    }
                } catch (Exception e) {
                    for(GramJobNotifier notifier : gramJobNotifierList) {
                        notifier.OnListenerError(job, e);
                    }
                }
            } else if (currentStatus == GramJob.STATUS_ACTIVE) {
                for(GramJobNotifier notifier : gramJobNotifierList) {
                    notifier.OnActive(job);
                }
            } else if (currentStatus == GramJob.STATUS_PENDING) {
                for(GramJobNotifier notifier : gramJobNotifierList) {
                    notifier.OnPending(job);
                }
            } else if (currentStatus == GramJob.STATUS_UNSUBMITTED) {
                for(GramJobNotifier notifier : gramJobNotifierList) {
                    notifier.OnUnSubmit(job);
                }
            } else if (currentStatus == GramJob.STATUS_STAGE_IN) {
                for(GramJobNotifier notifier : gramJobNotifierList) {
                    notifier.OnFilesStagedIn(job);
                }
            } else if (currentStatus == GramJob.STATUS_STAGE_OUT) {
                for(GramJobNotifier notifier : gramJobNotifierList) {
                    notifier.OnFilesStagedOut(job);
                }
            } else if (currentStatus == GramJob.STATUS_SUSPENDED) {
                for(GramJobNotifier notifier : gramJobNotifierList) {
                    notifier.OnSuspend(job);
                }
            }
        }
    }

    /** Prefixes a job id / status pair with the current wall-clock time. */
    public static String formatJobStatus(String jobid, String jobstatus) {
        return System.currentTimeMillis() + " " + jobid + " " + jobstatus;
    }

    /**
     * Wakes a thread blocked in {@code wait()} on this listener.
     * Fix: {@code notify()} must be called while holding this object's
     * monitor, so the method is now synchronized; previously every call threw
     * IllegalMonitorStateException, which was printed and discarded.
     */
    public synchronized void wakeup() {
        try {
            notify();
        } catch (Exception e) {
            log.error("Failed to wake up job submission listener", e);
        }
    }
}
| 9,513 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/notifier/GramJobLogger.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram.notifier;
import org.apache.log4j.Logger;
import org.globus.gram.GramJob;
import org.ietf.jgss.GSSException;
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/18/13
* Time: 10:59 AM
*/
/**
 * {@link GramJobNotifier} implementation that simply logs every job state
 * transition at INFO level (errors at ERROR level).
 */
public class GramJobLogger implements GramJobNotifier {

    private static final Logger log = Logger.getLogger(GramJobLogger.class);

    // Loaded for the pending error-description lookup in OnError (see the
    // TODO there); kept so a missing bundle is reported once at class load.
    private static ResourceBundle resources;

    // Message template: job id followed by the state name.
    // Promoted to a constant - it never varied per instance.
    private static final String STATE_FORMAT = "Job Id [ %s ] is in [ %s ] state.";

    static {
        try {
            resources = ResourceBundle.getBundle("org.apache.airavata.jobsubmission.gram.errors",
                    Locale.getDefault());
        } catch (MissingResourceException mre) {
            System.err.println("org.globus.gram.internal.gram.errors.properties not found");
        }
    }

    public void OnPending(GramJob job) {
        log.info(String.format(STATE_FORMAT, job.getIDAsString(), "PENDING"));
    }

    public void OnActive(GramJob job) {
        log.info(String.format(STATE_FORMAT, job.getIDAsString(), "ACTIVE"));
    }

    public void OnError(GramJob job) {
        log.info(String.format("Job Id [ %s ] is in %s state. Error code - %d and description - %s",
                job.getIDAsString(), "[ ERROR ]", job.getError(),"TODO fix bundle loading"));
    }

    public void OnCompletion(GramJob job) {
        log.info(String.format(STATE_FORMAT, job.getIDAsString(), "COMPLETE"));
    }

    public void OnCancel(GramJob job) {
        log.info(String.format(STATE_FORMAT, job.getIDAsString(), "USER-CANCELED"));
    }

    public void OnSuspend(GramJob job) {
        log.info(String.format(STATE_FORMAT, job.getIDAsString(), "SUSPEND"));
    }

    public void OnUnSubmit(GramJob job) {
        log.info(String.format(STATE_FORMAT, job.getIDAsString(), "UN-SUBMITTED"));
    }

    public void OnFilesStagedIn(GramJob job) {
        log.info(String.format(STATE_FORMAT, job.getIDAsString(), "FILES-STAGED-IN"));
    }

    public void OnFilesStagedOut(GramJob job) {
        log.info(String.format(STATE_FORMAT, job.getIDAsString(), "FILES-STAGED-OUT"));
    }

    public void OnListenerError(GramJob job, Exception e) {
        log.error("An error occurred while monitoring job id - " + job.getIDAsString(), e);
    }

    public void OnAuthorisationDenied(GramJob job) {
        try {
            log.error("Authorisation denied for job execution. User name - "
                    + job.getCredentials().getName().toString());
        } catch (GSSException e) {
            log.error("An error occurred while logging authorisation information - ", e);
        }
    }
}
| 9,514 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/notifier/GramJobNotifier.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram.notifier;
import org.globus.gram.GramJob;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/18/13
* Time: 10:45 AM
*/
/**
 * This interface abstracts out state changes of a job submitted to GFac.
 * For each state change of a job an appropriate method will get called.
 * Further each method will get details about the executing job as a
 * GramJob object.
 * <p>
 * NOTE(review): method names use UpperCamelCase ("OnPending") rather than
 * the conventional Java lowerCamelCase; kept as-is because existing
 * implementations depend on these exact names.
 */
public interface GramJobNotifier {

    /**
     * This method will get called when job is in pending state
     * (accepted but not yet running).
     * @param job Currently executing job.
     */
    void OnPending(GramJob job);

    /**
     * This method will get called when job is in active (running) state.
     * @param job Currently executing job.
     */
    void OnActive(GramJob job);

    /**
     * This method will get called when job is in Error state. Implementations
     * can query the error code from the job object.
     * @param job Currently executing job.
     */
    void OnError (GramJob job);

    /**
     * This method will get called when job is completed successfully.
     * @param job Currently executing job.
     */
    void OnCompletion(GramJob job);

    /**
     * This method will get called when some process cancels the currently executing job.
     * @param job Currently executing job.
     */
    void OnCancel (GramJob job);

    /**
     * This method will get called when job is in suspended state.
     * @param job Currently executing job.
     */
    void OnSuspend (GramJob job);

    /**
     * This method will get called when job is in un-submitted state.
     * @param job Currently executing job.
     */
    void OnUnSubmit (GramJob job);

    /**
     * This method will get called when the job stages in its input files.
     * @param job Currently executing job.
     */
    void OnFilesStagedIn (GramJob job);

    /**
     * This method will get called when the job stages out its output files.
     * @param job Currently executing job.
     */
    void OnFilesStagedOut (GramJob job);

    /**
     * If an unexpected error occurs in the listener code this method will get
     * called.
     * @param job Currently executing job.
     * @param e The unexpected exception.
     */
    void OnListenerError(GramJob job, Exception e);

    /**
     * This method will get called if authorisation for the job was denied.
     * @param job Currently executing job.
     */
    void OnAuthorisationDenied(GramJob job);
}
| 9,515 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/persistence/JobPersistenceManager.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram.persistence;
import org.apache.airavata.jobsubmission.gram.GFacException;
import java.util.List;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/18/13
* Time: 2:23 PM
*/
/**
 * Responsible for persisting job data. This data is useful during a restart.
 * When restarting, Airavata can resume monitoring currently executing jobs.
 */
public interface JobPersistenceManager {

    /**
     * Updates the job state in the persisting storage.
     * @param jobData Job data to update.
     * @throws GFacException If an error occurred while updating job data.
     */
    void updateJobStatus (JobData jobData) throws GFacException;

    /**
     * Get all running jobs.
     * @return Job data for jobs which are neither failed nor completed.
     * @throws GFacException If an error occurred while querying job data.
     */
    List<JobData> getRunningJobs() throws GFacException;

    /**
     * Get all failed jobs.
     * @return Job data for failed jobs.
     * @throws GFacException If an error occurred while querying job data.
     */
    List<JobData> getFailedJobs() throws GFacException;

    /**
     * Get all un-submitted jobs.
     * @return Job data for un-submitted jobs.
     * @throws GFacException If an error occurred while querying job data.
     */
    List<JobData> getUnSubmittedJobs() throws GFacException;

    /**
     * Get all successfully completed jobs.
     * @return Job data for successfully completed jobs.
     * @throws GFacException If an error occurred while querying job data.
     */
    List<JobData> getSuccessfullyCompletedJobs() throws GFacException;
}
| 9,516 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/persistence/DBJobPersistenceManager.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram.persistence;
import org.apache.airavata.common.utils.DBUtil;
import org.apache.airavata.jobsubmission.gram.GFacException;
import org.apache.log4j.Logger;
import org.globus.gram.internal.GRAMConstants;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/18/13
* Time: 4:16 PM
*/
/**
 * {@link JobPersistenceManager} backed by a relational table
 * {@code gram_job(job_id, status)} accessed through JDBC.
 */
public class DBJobPersistenceManager implements JobPersistenceManager {

    private DBUtil dbUtil;

    private static final Logger log = Logger.getLogger(DBJobPersistenceManager.class);

    /**
     * @param db Database utility used to hand out JDBC connections.
     */
    public DBJobPersistenceManager(DBUtil db) {
        this.dbUtil = db;
    }

    /**
     * Persists a status change. An UNSUBMITTED state means the job has never
     * been seen before, so a new row is inserted; any other state updates the
     * existing row.
     *
     * @param jobData Job id / state pair to persist.
     * @throws GFacException If the database operation fails.
     */
    public synchronized void updateJobStatus(JobData jobData) throws GFacException {
        if (jobData.getState() == GRAMConstants.STATUS_UNSUBMITTED) {
            insertJob(jobData);
        } else {
            String sql = "update gram_job set status = ? where job_id = ?";
            Connection connection = null;
            PreparedStatement stmt = null;
            try {
                connection = getConnection();
                stmt = connection.prepareStatement(sql);
                stmt.setInt(1, jobData.getState());
                stmt.setString(2, jobData.getJobId());
                stmt.executeUpdate();
                // BUG FIX: the explicit connection.commit() was removed.
                // getConnection() enables auto-commit, and the JDBC spec
                // mandates an SQLException when commit() is invoked on an
                // auto-commit connection.
            } catch (SQLException e) {
                throw new GFacException(e);
            } finally {
                closeQuietly(null, stmt, connection);
            }
        }
    }

    /**
     * Inserts a brand new (job_id, status) row for a job seen for the first time.
     *
     * @throws GFacException If the insert fails.
     */
    private void insertJob(JobData jobData) throws GFacException {
        String sql = "insert into gram_job values (?, ?)";
        PreparedStatement stmt = null;
        Connection connection = null;
        try {
            connection = getConnection();
            stmt = connection.prepareStatement(sql);
            stmt.setString(1, jobData.getJobId());
            stmt.setInt(2, jobData.getState());
            stmt.executeUpdate();
        } catch (SQLException e) {
            throw new GFacException(e);
        } finally {
            closeQuietly(null, stmt, connection);
        }
    }

    public List<JobData> getRunningJobs() throws GFacException {
        // "Running" is defined as any state that is not unsubmitted, done or failed.
        String sql = "select * from gram_job where status not in (?, ?, ?)";
        int[] statuses = new int[]{GRAMConstants.STATUS_UNSUBMITTED,
                GRAMConstants.STATUS_DONE, GRAMConstants.STATUS_FAILED};
        return getJobs(sql, statuses);
    }

    public List<JobData> getFailedJobs() throws GFacException {
        String sql = "select * from gram_job where status in (?)";
        return getJobs(sql, new int[]{GRAMConstants.STATUS_FAILED});
    }

    public List<JobData> getUnSubmittedJobs() throws GFacException {
        String sql = "select * from gram_job where status in (?)";
        return getJobs(sql, new int[]{GRAMConstants.STATUS_UNSUBMITTED});
    }

    public List<JobData> getSuccessfullyCompletedJobs() throws GFacException {
        String sql = "select * from gram_job where status in (?)";
        return getJobs(sql, new int[]{GRAMConstants.STATUS_DONE});
    }

    /**
     * Runs {@code sql} with the given status codes bound to its placeholders
     * (in order) and maps each result row to a {@link JobData}.
     *
     * @param sql      Query with one placeholder per status code.
     * @param statuses Status codes to bind.
     * @return Matching jobs; empty list when none match.
     * @throws GFacException If the query fails.
     */
    protected List<JobData> getJobs(String sql, int[] statuses) throws GFacException {
        List<JobData> jobs = new ArrayList<JobData>();
        PreparedStatement preparedStatement = null;
        Connection connection = null;
        ResultSet resultSet = null;
        try {
            connection = getConnection();
            preparedStatement = connection.prepareStatement(sql);
            int index = 1;
            for (int status : statuses) {
                preparedStatement.setInt(index, status);
                ++index;
            }
            resultSet = preparedStatement.executeQuery();
            while (resultSet.next()) {
                String jobId = resultSet.getString("job_id");
                int state = resultSet.getInt("status");
                jobs.add(new JobData(jobId, state));
            }
        } catch (SQLException e) {
            throw new GFacException(e);
        } finally {
            // BUG FIX: the ResultSet was previously never closed.
            closeQuietly(resultSet, preparedStatement, connection);
        }
        return jobs;
    }

    /**
     * Closes JDBC resources in the proper order (result set, statement,
     * connection), logging rather than propagating close failures.
     * Null arguments are skipped.
     */
    private static void closeQuietly(ResultSet resultSet, Statement statement, Connection connection) {
        try {
            if (resultSet != null) {
                resultSet.close();
            }
        } catch (SQLException e) {
            log.error("Error closing result set", e);
        }
        try {
            if (statement != null) {
                statement.close();
            }
        } catch (SQLException e) {
            log.error("Error closing statement", e);
        }
        try {
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            log.error("Error closing connection", e);
        }
    }

    /**
     * Obtains a fresh auto-commit connection; every statement is committed
     * immediately, so callers must not invoke commit() themselves.
     */
    private synchronized Connection getConnection() throws SQLException {
        Connection connection = dbUtil.getConnection();
        connection.setAutoCommit(true);
        return connection;
    }
}
| 9,517 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/persistence/JobData.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram.persistence;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/18/13
* Time: 2:34 PM
*/
/**
 * Simple value holder pairing a GRAM job identifier with its last known
 * numeric status code (one of the GRAMConstants.STATUS_* values).
 * Mutable so monitors can update the state in place.
 */
public class JobData {

    private String jobId;
    private int state;

    /**
     * @param id    GRAM job identifier.
     * @param state Initial status code.
     */
    public JobData(String id, int state) {
        this.jobId = id;
        this.state = state;
    }

    /** @return The GRAM job identifier. */
    public String getJobId() {
        return jobId;
    }

    /** @param jobId New GRAM job identifier. */
    public void setJobId(String jobId) {
        this.jobId = jobId;
    }

    /** @return The current status code. */
    public int getState() {
        return state;
    }

    /** @param state New status code. */
    public void setState(int state) {
        this.state = state;
    }
}
| 9,518 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/gram/persistence/PersistenceGramJobNotifier.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.gram.persistence;
import org.apache.airavata.jobsubmission.gram.GFacException;
import org.apache.airavata.jobsubmission.gram.notifier.GramJobNotifier;
import org.globus.gram.GramJob;
/**
* User: AmilaJ (amilaj@apache.org)
* Date: 6/18/13
* Time: 3:49 PM
*/
/**
 * A {@link GramJobNotifier} that records selected job state changes through a
 * {@link JobPersistenceManager}. Only PENDING, ERROR, COMPLETION, CANCEL and
 * AUTHORISATION-DENIED transitions are persisted; the remaining callbacks are
 * intentional no-ops.
 */
public class PersistenceGramJobNotifier implements GramJobNotifier {

    private JobPersistenceManager jobPersistenceManager;

    /**
     * @param persistenceManager Storage backend for job status records.
     */
    public PersistenceGramJobNotifier(JobPersistenceManager persistenceManager) {
        this.jobPersistenceManager = persistenceManager;
    }

    /**
     * Persists the job's current status. Extracted because five callbacks
     * previously duplicated this exact try/catch block.
     * Persistence is best-effort: failures are reported but not propagated,
     * since notifier callbacks must not break job monitoring.
     */
    private void persistCurrentStatus(GramJob job) {
        try {
            this.jobPersistenceManager.updateJobStatus(new JobData(job.getIDAsString(), job.getStatus()));
        } catch (GFacException e) {
            // TODO(review): route through a proper logger instead of stderr.
            e.printStackTrace();
        }
    }

    public void OnPending(GramJob job) {
        persistCurrentStatus(job);
    }

    public void OnActive(GramJob job) {
        // Intentionally not persisted.
    }

    public void OnError(GramJob job) {
        persistCurrentStatus(job);
    }

    public void OnCompletion(GramJob job) {
        persistCurrentStatus(job);
    }

    public void OnCancel(GramJob job) {
        persistCurrentStatus(job);
    }

    public void OnSuspend(GramJob job) {
        // Intentionally not persisted.
    }

    public void OnUnSubmit(GramJob job) {
        // Intentionally not persisted.
    }

    public void OnFilesStagedIn(GramJob job) {
        // Intentionally not persisted.
    }

    public void OnFilesStagedOut(GramJob job) {
        // Intentionally not persisted.
    }

    public void OnListenerError(GramJob job, Exception e) {
        // Intentionally not persisted.
    }

    public void OnAuthorisationDenied(GramJob job) {
        persistCurrentStatus(job);
    }
}
| 9,519 |
0 | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission | Create_ds/airavata-sandbox/grid-tools/gram-client/src/main/java/org/apache/airavata/jobsubmission/utils/ServiceConstants.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.jobsubmission.utils;
/**
 * Property-key constants used when reading job submission / MyProxy
 * configuration. Each value is the literal property name expected in the
 * configuration files, so the values must not be changed.
 */
public class ServiceConstants {

    /** Utility holder of constants; never instantiated. */
    private ServiceConstants() {
    }

    public static final String MYPROXY_SERVER = "myproxyServer";
    public static final String MYPROXY_PORT = "myproxyPort";
    public static final String MYPROXY_LIFETIME = "myproxy_lifetime";
    public static final String MYPROXY_USERNAME = "myproxyUserName";
    public static final String MYPROXY_PASSWD = "myproxyPasswd";
    public static final String TRUSTED_CERTS_FILE = "trustedCertsFile";
    public static final String HOSTCERTS_KEY_FILE = "hostcertsKeyFile";
    public static final String TESTINGHOST = "testing.host";
    public static final String LONESTARGRAMEPR = "lonestar.gram.endpoint";
    public static final String STAMPEDE_GRAM_EPR = "stampede.gram.endpoint";
    public static final String TRESTLESGRAMEPR = "trestles.gram.endpoint";
    public static final String EXECUTABLE = "executable";
    public static final String ARGUMENTS = "arguments";
    public static final String QUEUE = "queue";
    public static final String PROJECT_NUMBER = "project_number";
    public static final String MAX_WALL_TIME = "maxwalltime";
    public static final String JOB_TYPE = "jobtype";
    public static final String PCOUNT = "processors";
    public static final String HOSTCOUNT = "hosts";
}
| 9,520 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/Orchestrator.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core;
import org.apache.airavata.orchestrator.core.exception.OrchestratorException;
/*
This is the interface for orchestrator functionality exposed to the out side of the
module
*/
public interface Orchestrator {

    /**
     * This method will initialize the Orchestrator; during restart this will
     * get called and do init tasks (loading configuration, discovering GFAC
     * instances, starting worker pools).
     * @return true if initialization completed successfully.
     * @throws OrchestratorException If initialization fails, e.g. no GFAC
     *         instances are available or configuration cannot be read.
     */
    boolean initialize() throws OrchestratorException;

    /**
     * This method is the very first method which create an entry in
     * database for a given experiment, this return the experiment ID, so
     * user have full control for the experiment
     * @param request Experiment creation request (carries the user name).
     * @return The generated experiment ID identifying the new experiment.
     * @throws OrchestratorException If the experiment could not be stored.
     */
    String createExperiment(ExperimentRequest request) throws OrchestratorException;

    /**
     * After creating the experiment user has the experimentID, then user
     * can create the JobRequest and send the Job input parameters to Orchestrator
     * @param request Job request referencing an existing experiment ID.
     * @return true if the job was accepted; false if the request was invalid
     *         or the status change could not be recorded.
     * @throws OrchestratorException If an unrecoverable error occurs.
     */
    boolean acceptExperiment(JobRequest request) throws OrchestratorException;

    /**
     * This is like a cron job which runs continuously and take available jobs to
     * submit to GFAC and submit them to GFAC
     * @throws OrchestratorException If the submitter workers cannot be started.
     */
    void startJobSubmitter() throws OrchestratorException;

    /**
     * This method will get called during graceful shutdown of Orchestrator
     * This can be used to handle the shutdown of orchestrator gracefully.
     * @throws OrchestratorException If shutdown cannot be completed cleanly.
     */
    void shutdown() throws OrchestratorException;
}
| 9,521 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/JobRequest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Carries the input of a job submission: the submitting user and the ID of
 * the previously created experiment the job belongs to.
 * (An unused private static Logger was removed as dead code.)
 */
public class JobRequest {

    private String userName;
    private String experimentID;

    /** @return The submitting user's name (null until set). */
    public String getUserName() {
        return userName;
    }

    /** @param userName The submitting user's name. */
    public void setUserName(String userName) {
        this.userName = userName;
    }

    /** @return The target experiment ID (null until set). */
    public String getExperimentID() {
        return experimentID;
    }

    /** @param experimentID ID returned by Orchestrator.createExperiment. */
    public void setExperimentID(String experimentID) {
        this.experimentID = experimentID;
    }
}
| 9,522 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/ExperimentRequest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Request used to create a new experiment; currently carries only the name
 * of the user on whose behalf the experiment is created.
 * (An unused private static Logger was removed as dead code.)
 */
public class ExperimentRequest {

    private String userName;

    /** @return The requesting user's name (null until set). */
    public String getUserName() {
        return userName;
    }

    /** @param userName The requesting user's name. */
    public void setUserName(String userName) {
        this.userName = userName;
    }
}
| 9,523 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/OrchestratorConfiguration.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core;
/**
 * Runtime settings of the orchestrator: which JobSubmitter implementation to
 * load, how often each submitter worker polls, how many worker threads to
 * run, and whether submitters are launched at startup.
 */
public class OrchestratorConfiguration {

    private String submitterClass;                  // fully qualified JobSubmitter class name
    private int submitterInterval = 1000;           // poll interval between submission rounds
    private int threadPoolSize = 10;                // number of submitter worker threads
    private boolean startSubmitter = false;         // launch submitters during startup?

    /** @return Fully qualified name of the JobSubmitter implementation. */
    public String getSubmitterClass() {
        return submitterClass;
    }

    /** @param submitterClass Fully qualified JobSubmitter class name. */
    public void setSubmitterClass(String submitterClass) {
        this.submitterClass = submitterClass;
    }

    /** @return Poll interval between submission rounds (default 1000). */
    public int getSubmitterInterval() {
        return submitterInterval;
    }

    /** @param submitterInterval Poll interval between submission rounds. */
    public void setSubmitterInterval(int submitterInterval) {
        this.submitterInterval = submitterInterval;
    }

    /** @return Size of the submitter worker thread pool (default 10). */
    public int getThreadPoolSize() {
        return threadPoolSize;
    }

    /** @param threadPoolSize Size of the submitter worker thread pool. */
    public void setThreadPoolSize(int threadPoolSize) {
        this.threadPoolSize = threadPoolSize;
    }

    /** @return Whether submitter workers should start at startup (default false). */
    public boolean isStartSubmitter() {
        return startSubmitter;
    }

    /** @param startSubmitter Whether submitter workers start at startup. */
    public void setStartSubmitter(boolean startSubmitter) {
        this.startSubmitter = startSubmitter;
    }
}
| 9,524 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/PullBasedOrchestrator.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core;
import org.apache.airavata.common.exception.AiravataConfigurationException;
import org.apache.airavata.common.utils.AiravataJobState;
import org.apache.airavata.orchestrator.core.context.OrchestratorContext;
import org.apache.airavata.orchestrator.core.exception.OrchestratorException;
import org.apache.airavata.orchestrator.core.gfac.GFACInstance;
import org.apache.airavata.orchestrator.core.utils.OrchestratorConstants;
import org.apache.airavata.orchestrator.core.utils.OrchestratorUtils;
import org.apache.airavata.persistance.registry.jpa.impl.AiravataJPARegistry;
import org.apache.airavata.registry.api.*;
import org.apache.airavata.registry.api.exception.RegistryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URL;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Pull-based {@link Orchestrator} implementation: experiments are recorded in
 * the Airavata registry, and a pool of {@link JobSubmitterWorker} threads
 * periodically pulls accepted jobs and pushes them to GFAC instances.
 */
public class PullBasedOrchestrator implements Orchestrator {
    private final static Logger logger = LoggerFactory.getLogger(PullBasedOrchestrator.class);

    OrchestratorContext orchestratorContext;

    AiravataRegistry2 airavataRegistry;

    ExecutorService executor;

    /**
     * Loads the orchestrator configuration, discovers the available GFAC
     * instances from the registry, builds the orchestrator context and
     * creates the submitter thread pool.
     *
     * @return true when initialization succeeds (otherwise an exception is thrown).
     * @throws OrchestratorException If no GFAC instance is registered, the
     *         registry is unreachable, or the configuration cannot be parsed.
     */
    public boolean initialize() throws OrchestratorException {
        try {
            /* Initializing the OrchestratorConfiguration object */
            OrchestratorConfiguration orchestratorConfiguration = OrchestratorUtils.loadOrchestratorConfiguration();

            /* initializing the OrchestratorContext object */
            airavataRegistry = AiravataRegistryFactory.getRegistry(new Gateway("default"), new AiravataUser("admin"));
            Map<String, Integer> gfacNodeList = airavataRegistry.getGFACNodeList();
            if (gfacNodeList.isEmpty()) {
                String error = "No GFAC instances available in the system, Can't initialize Orchestrator";
                logger.error(error);
                throw new OrchestratorException(error);
            }
            // Map entries are (uri -> node value); iterate entries directly
            // instead of the previous keySet()+get() round trip.
            List<GFACInstance> gfacInstanceList = new ArrayList<GFACInstance>();
            for (Map.Entry<String, Integer> entry : gfacNodeList.entrySet()) {
                gfacInstanceList.add(new GFACInstance(entry.getKey(), entry.getValue()));
            }
            orchestratorContext = new OrchestratorContext(gfacInstanceList);
            orchestratorContext.setOrchestratorConfiguration(orchestratorConfiguration);

            /* Starting submitter thread pool */
            executor = Executors.newFixedThreadPool(orchestratorConfiguration.getThreadPoolSize());
        } catch (RegistryException e) {
            // Grammar fixed ("initializing" -> "initialize") and cause attached to the log.
            logger.error("Failed to initialize Orchestrator", e);
            throw new OrchestratorException(e);
        } catch (AiravataConfigurationException e) {
            logger.error("Failed to initialize Orchestrator", e);
            throw new OrchestratorException(e);
        } catch (IOException e) {
            logger.error("Failed to initialize Orchestrator - Error parsing orchestrator.properties", e);
            throw new OrchestratorException(e);
        }
        return true;
    }

    /** Stops accepting new submitter work and shuts the worker pool down. */
    public void shutdown() throws OrchestratorException {
        executor.shutdown();
    }

    /**
     * Creates a new experiment entry in the registry and marks it CREATED.
     *
     * @param request Carries the user name owning the experiment.
     * @return The generated experiment ID.
     * @throws OrchestratorException If the registry operation fails.
     */
    public String createExperiment(ExperimentRequest request) throws OrchestratorException {
        String experimentID = UUID.randomUUID().toString();
        String username = request.getUserName();
        try {
            airavataRegistry.storeExperiment(username, experimentID);
            airavataRegistry.changeStatus(username, experimentID, AiravataJobState.State.CREATED);
        } catch (RegistryException e) {
            //todo put more meaningful error message
            logger.error("Failed to create experiment for the request from " + request.getUserName());
            throw new OrchestratorException(e);
        }
        return experimentID;
    }

    /**
     * Validates the job request and, if valid, marks the referenced
     * experiment ACCEPTED so a submitter worker can pick it up.
     *
     * @param request Job request referencing an existing experiment ID.
     * @return true when the request was accepted; false for invalid requests
     *         or registry failures.
     */
    public boolean acceptExperiment(JobRequest request) throws OrchestratorException {
        // validate the jobRequest first
        if (!OrchestratorUtils.validateJobRequest(request)) {
            logger.error("Invalid Job request sent, Experiment creation failed");
            return false;
        }
        String experimentID = request.getExperimentID();
        String username = request.getUserName();
        try {
            airavataRegistry.changeStatus(username, experimentID, AiravataJobState.State.ACCEPTED);
        } catch (RegistryException e) {
            //todo put more meaningful error message
            logger.error("Failed to accept experiment " + experimentID + " from " + request.getUserName());
            return false;
        }
        return true;
    }

    /**
     * Launches one {@link JobSubmitterWorker} per configured pool thread.
     * NOTE(review): calling this more than once queues additional workers on
     * the same fixed-size pool — confirm callers invoke it only once.
     */
    public void startJobSubmitter() throws OrchestratorException {
        for (int i = 0; i < orchestratorContext.getOrchestratorConfiguration().getThreadPoolSize(); i++) {
            JobSubmitterWorker jobSubmitterWorker = new JobSubmitterWorker(orchestratorContext);
            executor.execute(jobSubmitterWorker);
        }
    }
}
| 9,525 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/JobSubmitterWorker.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core;
import org.apache.airavata.orchestrator.core.context.OrchestratorContext;
import org.apache.airavata.orchestrator.core.exception.OrchestratorException;
import org.apache.airavata.orchestrator.core.gfac.GFACInstance;
import org.apache.airavata.orchestrator.core.job.JobSubmitter;
import org.apache.airavata.orchestrator.core.utils.OrchestratorConstants;
import org.apache.airavata.registry.api.exception.RegistryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import java.util.Properties;
/**
 * Worker thread that periodically drains ACCEPTED (and hanged) jobs from the
 * registry and hands them in batches to a {@link JobSubmitter}.
 */
public class JobSubmitterWorker implements Runnable {
    private final static Logger logger = LoggerFactory.getLogger(JobSubmitterWorker.class);

    // Shared orchestrator state: configuration, registry and known GFAC instances.
    private OrchestratorContext orchestratorContext;

    // Pluggable submitter implementation, loaded reflectively from configuration.
    private JobSubmitter jobSubmitter;

    // Polling interval in milliseconds between submission rounds (default 1s).
    private int submitInterval = 1000;

    /**
     * Creates a worker and instantiates the configured {@link JobSubmitter}.
     *
     * @param orchestratorContext shared orchestrator context
     * @throws OrchestratorException if the configured submitter class cannot be
     *                               found, instantiated or accessed
     */
    public JobSubmitterWorker(OrchestratorContext orchestratorContext) throws OrchestratorException {
        this.orchestratorContext = orchestratorContext;
        try {
            String submitterClass = this.orchestratorContext.getOrchestratorConfiguration().getSubmitterClass();
            submitInterval = this.orchestratorContext.getOrchestratorConfiguration().getSubmitterInterval();
            Class<? extends JobSubmitter> aClass = Class.forName(submitterClass.trim()).asSubclass(JobSubmitter.class);
            jobSubmitter = aClass.newInstance();
        } catch (ClassNotFoundException e) {
            // Fix: previously this branch only logged, leaving jobSubmitter null
            // and guaranteeing a NullPointerException later in run().
            logger.error("Error while loading Job Submitter", e);
            throw new OrchestratorException(e);
        } catch (InstantiationException e) {
            logger.error("Error while loading Job Submitter", e);
            throw new OrchestratorException(e);
        } catch (IllegalAccessException e) {
            logger.error("Error while loading Job Submitter", e);
            throw new OrchestratorException(e);
        }
    }

    /**
     * Submission loop: sleep for submitInterval, pick a GFAC instance, submit
     * all accepted jobs to it, then re-submit hanged jobs. After ten
     * consecutive idle rounds the loop backs off by sleeping twice as long.
     */
    public void run() {
        int idleCount = 0;
        while (true) {
            try {
                Thread.sleep(submitInterval);
            } catch (InterruptedException e) {
                // Fix: restore the interrupt flag and stop the worker instead of
                // spinning forever on an interrupted thread.
                logger.error("JobSubmitter interrupted while waiting before submitting jobs", e);
                Thread.currentThread().interrupt();
                return;
            }
            /* Here the worker picks a bunch of jobs available to submit and submits
               them to a single GFAC instance; we do not handle job-by-job
               submission to each gfac instance. */
            GFACInstance gfacInstance = jobSubmitter.selectGFACInstance(orchestratorContext);
            try {
                List<String> allAcceptedJobs = orchestratorContext.getRegistry().getAllAcceptedJobs();
                List<String> allHangedJobs = orchestratorContext.getRegistry().getAllHangedJobs();
                if (allAcceptedJobs.isEmpty()) {
                    idleCount++;
                    if (idleCount == 10) {
                        try {
                            Thread.sleep(submitInterval * 2);
                        } catch (InterruptedException e) {
                            logger.error("JobSubmitter interrupted during idle back-off", e);
                            Thread.currentThread().interrupt();
                            return;
                        }
                        idleCount = 0;
                    }
                    continue;
                }
                idleCount = 0; // fix: reset the back-off counter once there is work
                jobSubmitter.submitJob(gfacInstance, allAcceptedJobs);
                /* After submitting available jobs, try to schedule again and then submit. */
                jobSubmitter.submitJob(jobSubmitter.selectGFACInstance(orchestratorContext), allHangedJobs);
            } catch (RegistryException e) {
                // Fix: complete the truncated message and keep the stack trace.
                logger.error("Error while trying to retrieve available jobs from the registry", e);
            }
        }
    }

    public OrchestratorContext getOrchestratorContext() {
        return orchestratorContext;
    }

    public void setOrchestratorContext(OrchestratorContext orchestratorContext) {
        this.orchestratorContext = orchestratorContext;
    }
}
| 9,526 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/impl/SimpleJobSubmitter.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core.impl;
import java.util.List;

import org.apache.airavata.orchestrator.core.context.OrchestratorContext;
import org.apache.airavata.orchestrator.core.gfac.GFACInstance;
import org.apache.airavata.orchestrator.core.job.JobSubmitter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Minimal no-op {@link JobSubmitter}: selects no GFAC instance and reports
 * every submission as unsuccessful. Useful as a placeholder/default.
 */
public class SimpleJobSubmitter implements JobSubmitter {
    private final static Logger logger = LoggerFactory.getLogger(SimpleJobSubmitter.class);

    /** Always returns {@code null}; this placeholder never selects an instance. */
    public GFACInstance selectGFACInstance(OrchestratorContext context) {
        return null;
    }

    /**
     * Fix: the previous single-argument signature did not match
     * {@link JobSubmitter#submitJob(GFACInstance, List)}, so this class failed
     * to implement its interface. This no-op implementation always reports failure.
     */
    public boolean submitJob(GFACInstance gfac, List<String> experimentIDList) {
        return false;
    }

    /** Backward-compatible overload kept for any existing direct callers. */
    public boolean submitJob(GFACInstance gfac) {
        return false;
    }
}
| 9,527 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/context/OrchestratorContext.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core.context;
import org.apache.airavata.orchestrator.core.OrchestratorConfiguration;
import org.apache.airavata.orchestrator.core.gfac.GFACInstance;
import org.apache.airavata.registry.api.AiravataRegistry2;
import org.apache.airavata.registry.api.OrchestratorRegistry;
import java.util.ArrayList;
import java.util.List;
/**
 * Shared state for the orchestrator: the known GFAC instances, the active
 * configuration, and the registry used to look up jobs.
 */
public class OrchestratorContext {
    private List<GFACInstance> gfacInstanceList;
    private OrchestratorConfiguration orchestratorConfiguration;
    private AiravataRegistry2 registry;

    /**
     * @param gfacInstanceList initial GFAC instances; defensively copied
     *                         ({@code null} is treated as empty)
     */
    public OrchestratorContext(List<GFACInstance> gfacInstanceList) {
        // Fix: the supplied list was previously ignored and always replaced
        // with a fresh empty list.
        this.gfacInstanceList = gfacInstanceList == null
                ? new ArrayList<GFACInstance>()
                : new ArrayList<GFACInstance>(gfacInstanceList);
    }

    public List<GFACInstance> getGfacInstanceList() {
        return gfacInstanceList;
    }

    public void addGfacInstanceList(GFACInstance instance) {
        this.gfacInstanceList.add(instance);
    }

    public OrchestratorConfiguration getOrchestratorConfiguration() {
        return orchestratorConfiguration;
    }

    public void setOrchestratorConfiguration(OrchestratorConfiguration orchestratorConfiguration) {
        this.orchestratorConfiguration = orchestratorConfiguration;
    }

    // NOTE(review): the field is an AiravataRegistry2 but is returned as an
    // OrchestratorRegistry — this assumes AiravataRegistry2 implements
    // OrchestratorRegistry; confirm against the registry API module.
    public OrchestratorRegistry getRegistry() {
        return registry;
    }

    public void setRegistry(AiravataRegistry2 registry) {
        this.registry = registry;
    }
}
| 9,528 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/utils/OrchestratorUtils.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core.utils;
import org.apache.airavata.orchestrator.core.JobRequest;
import org.apache.airavata.orchestrator.core.JobSubmitterWorker;
import org.apache.airavata.orchestrator.core.OrchestratorConfiguration;
import org.apache.airavata.orchestrator.core.exception.OrchestratorException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Properties;
/**
 * Static helpers for loading orchestrator configuration and validating
 * incoming job requests.
 */
public class OrchestratorUtils {
    private final static Logger logger = LoggerFactory.getLogger(OrchestratorUtils.class);

    /**
     * Loads {@code orchestrator.properties} from the classpath and builds an
     * {@link OrchestratorConfiguration} from it.
     *
     * @return the populated configuration
     * @throws OrchestratorException if the properties file cannot be found
     * @throws IOException           if the properties file cannot be read or a
     *                               numeric property is malformed
     */
    public static OrchestratorConfiguration loadOrchestratorConfiguration() throws OrchestratorException, IOException {
        URL resource =
                JobSubmitterWorker.class.getClassLoader().getResource(OrchestratorConstants.ORCHESTRATOR_PROPERTIES);
        if (resource == null) {
            String error = "orchestrator.properties cannot be found, Failed to initialize Orchestrator";
            logger.error(error);
            throw new OrchestratorException(error);
        }
        OrchestratorConfiguration orchestratorConfiguration = new OrchestratorConfiguration();
        Properties orchestratorProps = new Properties();
        // Fix: close the stream; it previously leaked on every load.
        InputStream in = resource.openStream();
        try {
            orchestratorProps.load(in);
        } finally {
            in.close();
        }
        orchestratorConfiguration.setSubmitterClass(orchestratorProps.getProperty(OrchestratorConstants.JOB_SUBMITTER));
        // Fix: values loaded from a properties file are Strings; the previous
        // (Integer) casts would always throw ClassCastException at runtime.
        orchestratorConfiguration.setSubmitterInterval(
                Integer.parseInt(orchestratorProps.getProperty(OrchestratorConstants.SUBMIT_INTERVAL).trim()));
        orchestratorConfiguration.setThreadPoolSize(
                Integer.parseInt(orchestratorProps.getProperty(OrchestratorConstants.THREAD_POOL_SIZE).trim()));
        orchestratorConfiguration.setStartSubmitter(
                Boolean.valueOf(orchestratorProps.getProperty(OrchestratorConstants.START_SUBMITTER)));
        return orchestratorConfiguration;
    }

    /** Validates an incoming job request. TODO: real validation; currently accepts everything. */
    public static boolean validateJobRequest(JobRequest request) {
        return true;
    }
}
| 9,529 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/utils/OrchestratorConstants.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core.utils;
/**
 * Property keys and built-in defaults used to configure the orchestrator.
 * All keys refer to entries in {@code orchestrator.properties}.
 */
public class OrchestratorConstants {
    /** Classpath name of the orchestrator configuration file. */
    public static final String ORCHESTRATOR_PROPERTIES = "orchestrator.properties";
    /** Interval (ms) between hot-update checks. */
    public static final int hotUpdateInterval = 1000;
    /** Property key: fully-qualified class name of the JobSubmitter implementation. */
    public static final String JOB_SUBMITTER = "job.submitter";
    /** Property key: submitter polling interval in milliseconds. */
    public static final String SUBMIT_INTERVAL = "submitter.interval";
    /** Property key: submitter worker thread-pool size. */
    public static final String THREAD_POOL_SIZE = "threadpool.size";
    /** Property key: whether the submitter should start automatically. */
    public static final String START_SUBMITTER = "start.submitter";

    // Constants holder; not instantiable. (Removed the private, unused
    // SUBMITTER_PROPERTY duplicate of JOB_SUBMITTER.)
    private OrchestratorConstants() {
    }
}
| 9,530 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/gfac/GFACInstance.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core.gfac;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Lightweight record of a GFAC service endpoint and its current load, used by
 * the orchestrator when choosing a submission target.
 * (Removed an unused private slf4j logger field.)
 */
public class GFACInstance {
    private String gfacURL;
    private int currentLoad;

    /**
     * @param gfacURL     endpoint URL of the GFAC service
     * @param currentLoad current load metric reported for this instance
     */
    public GFACInstance(String gfacURL, int currentLoad) {
        this.gfacURL = gfacURL;
        this.currentLoad = currentLoad;
    }

    public String getGfacURL() {
        return gfacURL;
    }

    public void setGfacURL(String gfacURL) {
        this.gfacURL = gfacURL;
    }

    public int getCurrentLoad() {
        return currentLoad;
    }

    public void setCurrentLoad(int currentLoad) {
        this.currentLoad = currentLoad;
    }
}
| 9,531 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/job/JobSubmitter.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core.job;
import org.apache.airavata.orchestrator.core.context.OrchestratorContext;
import org.apache.airavata.orchestrator.core.gfac.GFACInstance;
import java.util.List;
public interface JobSubmitter {
    /**
     * Chooses the GFAC instance that the next batch of jobs should be sent to.
     *
     * @param context orchestrator context holding the known GFAC instances
     * @return the selected instance (implementations may return {@code null}
     *         when no instance is available)
     */
    GFACInstance selectGFACInstance(OrchestratorContext context);
    /**
     * Submits the given experiments as one batch to the given GFAC instance.
     *
     * @param gfac             target GFAC instance
     * @param experimentIDList IDs of the experiments to submit
     * @return {@code true} if the batch was submitted successfully
     */
    boolean submitJob(GFACInstance gfac,List<String> experimentIDList);
}
| 9,532 |
0 | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core | Create_ds/airavata-sandbox/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/core/exception/OrchestratorException.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.orchestrator.core.exception;
/**
 * Checked exception reported for orchestrator configuration and runtime
 * failures.
 */
public class OrchestratorException extends Exception {
    private static final long serialVersionUID = -2849422320139467602L;

    public OrchestratorException(Throwable e) {
        super(e);
    }

    public OrchestratorException(String message) {
        // Fix: was super(message, null), which permanently pins the cause to
        // null and makes any later initCause() throw IllegalStateException.
        super(message);
    }

    public OrchestratorException(String message, Throwable e) {
        super(message, e);
    }
}
| 9,533 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core/application/GramApplicationDescriptor.java | package org.apache.airavata.core.application;
/**
 * Application descriptor for applications launched via GRAM: carries the
 * executable and scratch locations plus the GRAM host and GridFTP endpoint
 * used to reach the execution site.
 */
public class GramApplicationDescriptor extends ApplicationDescriptor {

    private String executablePath;
    private String scratchLocation;
    private String gramHost;
    private String gridFTPEndpoint;

    /** Path of the executable on the target resource. */
    public String getExecutablePath() {
        return executablePath;
    }

    public void setExecutablePath(String executablePath) {
        this.executablePath = executablePath;
    }

    /** Working/scratch directory on the target resource. */
    public String getScratchLocation() {
        return scratchLocation;
    }

    public void setScratchLocation(String scratchLocation) {
        this.scratchLocation = scratchLocation;
    }

    /** Host running the GRAM gatekeeper. */
    public String getGramHost() {
        return gramHost;
    }

    public void setGramHost(String gramHost) {
        this.gramHost = gramHost;
    }

    /** GridFTP endpoint used for file staging. */
    public String getGridFTPEndpoint() {
        return gridFTPEndpoint;
    }

    public void setGridFTPEndpoint(String gridFTPEndpoint) {
        this.gridFTPEndpoint = gridFTPEndpoint;
    }
}
| 9,534 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core/application/ParameterType.java | package org.apache.airavata.core.application;
/** Value types supported for application parameters. */
public enum ParameterType {
    /** Textual value. */
    STRING,
    /** Whole-number value. */
    INTEGER
}
| 9,535 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core/application/ExperimentData.java | package org.apache.airavata.core.application;
import java.util.Date;
/**
 * Record of a single experiment run: its id, the template it was launched
 * from, when it was submitted, and its input/output data strings.
 */
public class ExperimentData {

    private String experimentId;
    private String experimentTemplateId;
    private Date submissionDate;
    private String data;
    private String inputData;

    /** No-arg constructor (kept for serialization frameworks). */
    public ExperimentData() {
    }

    /** Fully-populating constructor. */
    public ExperimentData(String experimentId, String experimentTemplateId,
                          Date submissionDate, String data, String inputData) {
        this.experimentId = experimentId;
        this.experimentTemplateId = experimentTemplateId;
        this.submissionDate = submissionDate;
        this.data = data;
        this.inputData = inputData;
    }

    public String getExperimentId() {
        return experimentId;
    }

    public void setExperimentId(String experimentId) {
        this.experimentId = experimentId;
    }

    public String getExperimentTemplateId() {
        return experimentTemplateId;
    }

    public void setExperimentTemplateId(String experimentTemplateId) {
        this.experimentTemplateId = experimentTemplateId;
    }

    public Date getSubmissionDate() {
        return submissionDate;
    }

    public void setSubmissionDate(Date submissionDate) {
        this.submissionDate = submissionDate;
    }

    /** Output data produced by the experiment. */
    public String getData() {
        return data;
    }

    public void setData(String data) {
        this.data = data;
    }

    /** Input data the experiment was launched with. */
    public String getInputData() {
        return inputData;
    }

    public void setInputData(String inputData) {
        this.inputData = inputData;
    }
}
| 9,536 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core/application/ApplicationParameter.java | package org.apache.airavata.core.application;
/**
 * One named, typed input or output parameter of an application.
 */
public class ApplicationParameter {

    private String name;
    private String value;
    private ParameterType type;

    /** No-arg constructor (kept for serialization frameworks). */
    public ApplicationParameter() {
    }

    /** Creates a fully-populated parameter. */
    public ApplicationParameter(String name, String value, ParameterType type) {
        this.name = name;
        this.value = value;
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public ParameterType getType() {
        return type;
    }

    public void setType(ParameterType type) {
        this.type = type;
    }
}
| 9,537 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core/application/WorkflowInput.java | package org.apache.airavata.core.application;
/**
 * Simple name/value pair supplied as an input when launching a workflow.
 */
public class WorkflowInput {

    private String name;
    private String value;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }
}
| 9,538 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core/application/ApplicationDescriptor.java | package org.apache.airavata.core.application;
import java.util.ArrayList;
import java.util.List;
/**
 * Base class for application descriptors: a name plus lists of input and
 * output parameters. Parameter lists are lazily created, so the getters never
 * return {@code null}.
 */
public abstract class ApplicationDescriptor {

    private String applicationName;
    private List<ApplicationParameter> inputs;
    private List<ApplicationParameter> outputs;

    public String getApplicationName() {
        return applicationName;
    }

    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    /** Input parameters; lazily initialized to an empty list. */
    public List<ApplicationParameter> getInputs() {
        if (inputs == null) {
            inputs = new ArrayList<ApplicationParameter>();
        }
        return inputs;
    }

    public void setInputs(List<ApplicationParameter> inputs) {
        this.inputs = inputs;
    }

    /** Output parameters; lazily initialized to an empty list. */
    public List<ApplicationParameter> getOutputs() {
        if (outputs == null) {
            outputs = new ArrayList<ApplicationParameter>();
        }
        return outputs;
    }

    public void setOutputs(List<ApplicationParameter> outputs) {
        this.outputs = outputs;
    }
}
| 9,539 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core | Create_ds/airavata-sandbox/api-mock/airavata-mock-core/src/main/java/org/apache/airavata/core/application/LocalApplicationDescriptor.java | package org.apache.airavata.core.application;
/**
 * Descriptor for an application executed on the local machine: just an
 * executable path and a scratch directory.
 */
public class LocalApplicationDescriptor extends ApplicationDescriptor {

    private String executablePath;
    private String scratchLocation;

    /** Path of the local executable. */
    public String getExecutablePath() {
        return executablePath;
    }

    public void setExecutablePath(String executablePath) {
        this.executablePath = executablePath;
    }

    /** Local working/scratch directory. */
    public String getScratchLocation() {
        return scratchLocation;
    }

    public void setScratchLocation(String scratchLocation) {
        this.scratchLocation = scratchLocation;
    }
}
| 9,540 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service/src/main/java/org/apache/airavata | Create_ds/airavata-sandbox/api-mock/airavata-mock-service/src/main/java/org/apache/airavata/service/HelpService.java | package org.apache.airavata.service;
import java.net.URI;
import java.net.URISyntaxException;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import org.apache.airavata.service.utils.ServiceUtils;
import org.apache.airavata.service.utils.help.HTMLHelpData;
import org.apache.airavata.service.utils.help.HelpData;
import org.apache.airavata.service.utils.help.MethodUtils;
import org.apache.airavata.service.utils.path.ApplicationPath;
import org.apache.airavata.service.utils.path.ExperimentPath;
import org.apache.airavata.service.utils.path.MainHelpPath;
/**
* curl -X PUT http://127.0.0.1:9090/orders-server/orders/1?customer_name=bob
* curl -X GET http://127.0.0.1:9090/orders-server/orders/1 curl -X GET
* http://127.0.0.1:9090/orders-server/orders/list
*/
@Path(MainHelpPath.SERVICE_PATH)
public class HelpService {
@Context
UriInfo uriInfo;
@Path(MainHelpPath.ENTRY)
@GET
@Produces(MediaType.TEXT_HTML)
public String add() throws URISyntaxException {
URI url = ServiceUtils.getServiceOperationURIFromHelpURI(uriInfo);
HelpData helpData = new HTMLHelpData("Airavata Mock API", "Welcome to Airavata API!!!");
helpData.getParameters().put("<a href='"+url.toString()+ApplicationPath.SERVICE_PATH+ApplicationPath.ADD_APPLICATION_HELP+"'>New Application</a>", "Add new application to Airavata system");
helpData.getParameters().put("<a href='"+url.toString()+ExperimentPath.SERVICE_PATH+ExperimentPath.ADD_TEMPLATE_HELP+"'>New Experiment Template</a>", "Add new application to Airavata system");
helpData.getParameters().put("<a href='"+url.toString()+ExperimentPath.SERVICE_PATH+ExperimentPath.RUN_EXPERIMENTS_HELP+"'>Launch Experiment</a>", "Launch an experiment from a experiment template in Airavata system");
helpData.getParameters().put("<a href='"+url.toString()+ExperimentPath.SERVICE_PATH+ExperimentPath.GET_RESULTS_HELP+"'>Get Experiment Results</a>", "Return the results of launching the experiment");
helpData.getParameters().put("<a href='"+url.toString()+ExperimentPath.SERVICE_PATH+ExperimentPath.LIST_TEMPLATES_HELP+"'>Experiment Template List</a>", "List of templates available.");
helpData.getParameters().put("<a href='"+url.toString()+ExperimentPath.SERVICE_PATH+ExperimentPath.LIST_EXPERIMENTS_HELP+"'>Experiments</a>", "List of experiments available.");
return MethodUtils.getHelpString(helpData);
}
} | 9,541 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service/src/main/java/org/apache/airavata | Create_ds/airavata-sandbox/api-mock/airavata-mock-service/src/main/java/org/apache/airavata/service/ApplicationService.java | package org.apache.airavata.service;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import org.apache.airavata.core.application.ApplicationDescriptor;
import org.apache.airavata.core.application.ApplicationParameter;
import org.apache.airavata.core.application.LocalApplicationDescriptor;
import org.apache.airavata.core.application.ParameterType;
import org.apache.airavata.service.utils.ServiceUtils;
import org.apache.airavata.service.utils.help.HTMLHelpData;
import org.apache.airavata.service.utils.help.HelpData;
import org.apache.airavata.service.utils.help.MethodUtils;
import org.apache.airavata.service.utils.json.ConversionUtils;
import org.apache.airavata.service.utils.model.ApplicationDescriptorJSONFacotry;
import org.apache.airavata.service.utils.model.DataList;
import org.apache.airavata.service.utils.path.ApplicationPath;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
/**
* curl -X PUT http://127.0.0.1:9090/orders-server/orders/1?customer_name=bob
* curl -X GET http://127.0.0.1:9090/orders-server/orders/1 curl -X GET
* http://127.0.0.1:9090/orders-server/orders/list
*/
// REST endpoint for registering application descriptors with the mock API,
// plus an HTML help page describing the supported JSON payloads.
@Path(ApplicationPath.SERVICE_PATH)
public class ApplicationService {
    // Request URI details injected by JAX-RS; used to build help links.
    @Context
    UriInfo uriInfo;
    // Registers an application described by the "application" query parameter
    // (a JSON string matching one of the descriptor types known to
    // ApplicationDescriptorJSONFacotry) and echoes back the application name.
    @Path(ApplicationPath.ADD_APPLICATION)
    @GET
    @Produces(MediaType.TEXT_PLAIN)
    public String add(@QueryParam("application") String application) {
        ApplicationDescriptor obj;
        try {
            // Deserialize the JSON into one of the supported descriptor types.
            obj = ConversionUtils.getJavaObject(application, ApplicationDescriptorJSONFacotry.getInstance().getTypes(), ApplicationDescriptor.class);
            String message=obj.getApplicationName()+" application added.";
            System.out.println(message);
            return obj.getApplicationName();
        } catch (Exception e) {
            // Any parse/conversion failure becomes an HTTP error response.
            throw new WebApplicationException(e);
        }
    }
    // Renders the HTML help page for the add-application operation, including
    // a JSON template for each supported descriptor type.
    @Path(ApplicationPath.ADD_APPLICATION_HELP)
    @GET
    @Produces(MediaType.TEXT_HTML)
    public String showHelp() {
        HelpData helpData = new HTMLHelpData("New Application","Add the details of how to access an application from Airavata");
        try {
            URI uri = ServiceUtils.getServiceOperationURIFromHelpURI(uriInfo);
            helpData.getSyntax().add(uri.toString()+"?application=<JSONString>");
            helpData.getParameters().put("application", "Describes the application access data in JSON format. The supported JSON types are listed in the 'Notes' section.");
            // One "Notes" section per supported descriptor type, with its
            // name, description and a JSON template.
            List<Class<?>> types = ApplicationDescriptorJSONFacotry.getInstance().getTypes();
            for (Class<?> cl : types) {
                String help="";
                help+="<h3>"+ApplicationDescriptorJSONFacotry.getInstance().getTypeName(cl)+"</h3>\n";
                help+="\t "+ApplicationDescriptorJSONFacotry.getInstance().getTypeDescription(cl)+"<br />\n";
                help+="\t JSON template:\n"+"\t\t"+ApplicationDescriptorJSONFacotry.getInstance().getJSONTypeTemplate(cl)+"\n";
                helpData.getNotes().add(help);
            }
            helpData.getExamples().add(uri.toString()+"?application={%22applicationName%22:%22echoApp%22,%22inputs%22:[{%22name%22:%22input_val%22,%22value%22:%22test%22,%22type%22:%22STRING%22}],%22outputs%22:[{%22name%22:%22output_val%22,%22value%22:%22test%22,%22type%22:%22STRING%22}],%22executablePath%22:null,%22scratchLocation%22:null}");
        } catch (URISyntaxException e) {
            // NOTE(review): failures below are swallowed so a partial help
            // page is still rendered; consider logging instead of stderr.
            e.printStackTrace();
        } catch (JsonGenerationException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (JsonMappingException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        System.out.println("help called");
        return MethodUtils.getHelpString(helpData);
    }
    // Ad-hoc manual test: prints sample JSON serializations of a descriptor
    // and a DataList to stdout. Not used by the service itself.
    public static void main(String[] args) throws JsonGenerationException,
            JsonMappingException, IOException {
        ApplicationDescriptor aa = new LocalApplicationDescriptor();
        aa.setApplicationName("echoApp");
        aa.getInputs().add(
                new ApplicationParameter("input_val", "test",
                        ParameterType.STRING));
        aa.getOutputs().add(
                new ApplicationParameter("output_val", "test",
                        ParameterType.STRING));
        ObjectMapper mapper = new ObjectMapper();
        String s = mapper.writeValueAsString(aa);
        System.out.println(s);
        DataList d = new DataList();
        ArrayList<String> list = new ArrayList<String>();
        list.add("msg_part1=Hello");
        list.add("msg_part2=World");
        d.setList(list);
        System.out.println(mapper.writeValueAsString(d));
        // A bb = mapper.readValue(s, AA.class);
        // System.out.println(bb.getValue());
    }
    // Minimal interface/impl pair used when experimenting with Jackson
    // (de)serialization in main() above.
    public static interface A {
        public String getValue();
        public void setValue(String value);
    }
    public static class AA implements A {
        private String value;
        @Override
        public String getValue() {
            return value;
        }
        @Override
        public void setValue(String value) {
            this.value = value;
        }
    }
}
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service/src/main/java/org/apache/airavata | Create_ds/airavata-sandbox/api-mock/airavata-mock-service/src/main/java/org/apache/airavata/service/ExperimentService.java | package org.apache.airavata.service;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import org.apache.airavata.core.application.ExperimentData;
import org.apache.airavata.service.utils.ServiceUtils;
import org.apache.airavata.service.utils.help.HTMLHelpData;
import org.apache.airavata.service.utils.help.HelpData;
import org.apache.airavata.service.utils.help.MethodUtils;
import org.apache.airavata.service.utils.path.ExperimentPath;
@Path(ExperimentPath.SERVICE_PATH)
public class ExperimentService {
    // In-memory mock stores, shared across requests and never persisted:
    // template id -> template body, experiment id -> template id, and
    // experiment id -> recorded experiment data.
    private static Map<String,String> templates=new HashMap<String, String>();
    private static Map<String,String> experiments=new HashMap<String, String>();
    private static Map<String,ExperimentData> experimentData=new HashMap<String, ExperimentData>();
    // Request URI details injected by JAX-RS; used to build help links.
    @Context
    UriInfo uriInfo;
@Path(ExperimentPath.ADD_TEMPLATE+"/{templateName}")
@GET
@Produces(MediaType.TEXT_PLAIN)
public String addTemplate(@PathParam("templateName") String templateId, @QueryParam("experimentTemplate") String experimentTemplate) {
templates.put(templateId, experimentTemplate);
String message=templateId+" added as an experiment template.";
System.out.println(message);
return templateId;
}
@Path(ExperimentPath.ADD_TEMPLATE_HELP)
@GET
@Produces(MediaType.TEXT_HTML)
public String addTemplateHelp() {
HelpData helpData = new HTMLHelpData("Add Experiment","Add a experiment template (aka workflow) to Airavata");
try {
URI uri = ServiceUtils.getServiceOperationURIFromHelpURI(uriInfo);
helpData.getSyntax().add(uri.toString()+"/<templateName>?experimentTemplate=<Template_String>");
helpData.getParameters().put("templateName", "Name of this experiment.");
helpData.getParameters().put("experimentTemplate", "Describes the template for the experiment.");
// helpData.getExamples().add(uri.toString()+"?application={%22applicationName%22:%22echoApp%22,%22inputs%22:[{%22name%22:%22input_val%22,%22value%22:%22test%22,%22type%22:%22STRING%22}],%22outputs%22:[{%22name%22:%22output_val%22,%22value%22:%22test%22,%22type%22:%22STRING%22}],%22executablePath%22:null,%22scratchLocation%22:null}");
} catch (URISyntaxException e) {
e.printStackTrace();
}
return MethodUtils.getHelpString(helpData);
}
@Path(ExperimentPath.LIST_TEMPLATES)
@GET
@Produces(MediaType.APPLICATION_JSON)
public List<String> getTemplates() {
return (Arrays.asList(templates.keySet().toArray(new String[]{})));
}
@Path(ExperimentPath.LIST_TEMPLATES_HELP)
@GET
@Produces(MediaType.TEXT_HTML)
public String getTemplatesHelp() {
HelpData helpData = new HTMLHelpData("List Experiment Templates","Return a list of registered experiment templates");
try {
URI uri = ServiceUtils.getServiceOperationURIFromHelpURI(uriInfo);
helpData.getSyntax().add(uri.toString());
helpData.getExamples().add(uri.toString());
} catch (URISyntaxException e) {
e.printStackTrace();
}
return MethodUtils.getHelpString(helpData);
}
@Path(ExperimentPath.RUN_EXPERIMENTS+"/{templateName}")
@GET
@Produces(MediaType.TEXT_PLAIN)
public String runExperiment(@PathParam("templateName") String templateId, @QueryParam("experimentInput") String experimentInput) {
if (!templates.containsKey(templateId)){
throw new WebApplicationException(new Exception("The experiment template "+templateId+" does not exist!!!"));
}
UUID uuid = UUID.randomUUID();
experiments.put(uuid.toString(), experimentInput);
experimentData.put(uuid.toString(), new ExperimentData(uuid.toString(), templateId, Calendar.getInstance().getTime(), "test_data", experimentInput));
String message="Experiment "+uuid.toString()+" is executing...";
System.out.println(message);
return uuid.toString();
}
@Path(ExperimentPath.RUN_EXPERIMENTS_HELP)
@GET
@Produces(MediaType.TEXT_HTML)
public String runExperimentHelp() {
HelpData helpData = new HTMLHelpData("Launch Experiments","Provide input data and configuration data to instantiate an experiment from an experiment template");
try {
URI uri = ServiceUtils.getServiceOperationURIFromHelpURI(uriInfo);
helpData.getSyntax().add(uri.toString()+"/<TemplateName>?experimentInput=<InputDataArray>");
helpData.getParameters().put("TemplateName", "Name of the experiment template to instantiate.");
helpData.getParameters().put("experimentInput", "List of input values to passed on to the intantiated experiment template");
helpData.getParameters().put("<RETURN_VALUE>", "A unique id identifying the experiment launched");
helpData.getExamples().add(uri.toString()+"/echo_workflow?experimentInput={\"list\":[\"msg_part1=Hello\",\"msg_part2=World\"]}");
} catch (URISyntaxException e) {
e.printStackTrace();
}
return MethodUtils.getHelpString(helpData);
}
@Path(ExperimentPath.LIST_EXPERIMENTS)
@GET
@Produces(MediaType.APPLICATION_JSON)
public List<String> getExperiments() {
return (Arrays.asList(experiments.keySet().toArray(new String[]{})));
}
@Path(ExperimentPath.LIST_EXPERIMENTS_HELP)
@GET
@Produces(MediaType.TEXT_HTML)
public String getExperimentsHelp() {
HelpData helpData = new HTMLHelpData("List Experiments Instantiated","Return a list of launched experiments");
try {
URI uri = ServiceUtils.getServiceOperationURIFromHelpURI(uriInfo);
helpData.getSyntax().add(uri.toString());
helpData.getExamples().add(uri.toString());
} catch (URISyntaxException e) {
e.printStackTrace();
}
return MethodUtils.getHelpString(helpData);
}
@Path(ExperimentPath.GET_RESULTS+"/{experimentId}")
@GET
@Produces(MediaType.APPLICATION_JSON)
public ExperimentData getExperimentData(@PathParam ("experimentId") String experimentId) {
if (experimentData.containsKey(experimentId)){
return experimentData.get(experimentId);
}
throw new WebApplicationException(new Exception("no data for experiment id "+experimentId));
}
@Path(ExperimentPath.GET_RESULTS_HELP)
@GET
@Produces(MediaType.TEXT_HTML)
public String getExperimentDataHelp() {
HelpData helpData = new HTMLHelpData("Get Experiment Results","Retrieve execution results of the experiment");
try {
URI uri = ServiceUtils.getServiceOperationURIFromHelpURI(uriInfo);
helpData.getSyntax().add(uri.toString()+"/<ExperimentId>");
helpData.getParameters().put("ExperimentId","The id of the experiment");
helpData.getExamples().add(uri.toString()+"/UUID1328414123o12o321");
} catch (URISyntaxException e) {
e.printStackTrace();
}
return MethodUtils.getHelpString(helpData);
}
} | 9,543 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/ServiceUtils.java | package org.apache.airavata.service.utils;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import javax.ws.rs.core.UriInfo;
import org.apache.airavata.service.utils.model.DataList;
/**
 * Small helpers shared by the mock REST service resources.
 */
public class ServiceUtils {

    /**
     * Wraps a list of strings in a {@link DataList} transport object.
     *
     * @param list the values to wrap
     * @return a DataList carrying the given list
     */
    public static DataList WrapList(List<String> list) {
        DataList wrapped = new DataList();
        wrapped.setList(list);
        return wrapped;
    }

    /**
     * Derives the URI of a service operation from the URI of its help
     * resource by dropping the final path segment (the trailing "help").
     *
     * @param uriInfo request URI information of the help resource
     * @return the URI of the operation the help page documents
     * @throws URISyntaxException if the rebuilt URI is malformed
     */
    public static URI getServiceOperationURIFromHelpURI(UriInfo uriInfo) throws URISyntaxException {
        String rawPath = uriInfo.getAbsolutePath().getPath();
        // Normalize: strip one trailing and one leading slash before splitting.
        if (rawPath.endsWith("/")) {
            rawPath = rawPath.substring(0, rawPath.length() - 1);
        }
        if (rawPath.startsWith("/")) {
            rawPath = rawPath.substring(1);
        }
        String[] segments = rawPath.split("/");
        // Rebuild the path from every segment except the last one.
        StringBuilder operationPath = new StringBuilder();
        for (int i = 0; i < segments.length - 1; i++) {
            operationPath.append('/').append(segments[i]);
        }
        URI base = uriInfo.getBaseUri();
        return new URI(base.getScheme(), base.getUserInfo(), base.getHost(), base.getPort(),
                operationPath.toString(), null, null);
    }
}
| 9,544 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/path/ExperimentPath.java | package org.apache.airavata.service.utils.path;
/**
 * URL path constants for the mock experiment REST service.
 *
 * Each {@code *_HELP} constant is derived from its operation constant so the
 * two can never drift apart; the resulting values are unchanged and remain
 * compile-time constants.
 */
public class ExperimentPath {

    public static final String SERVICE_PATH = "/experiments/";

    public static final String ADD_TEMPLATE = "add";
    public static final String LIST_TEMPLATES = "templates";
    public static final String LIST_EXPERIMENTS = "list";
    public static final String RUN_EXPERIMENTS = "run";
    public static final String GET_RESULTS = "results";

    // Suffix appended to an operation path to reach its help page.
    private static final String HELP_SUFFIX = "/help";

    public static final String ADD_TEMPLATE_HELP = ADD_TEMPLATE + HELP_SUFFIX;
    public static final String LIST_TEMPLATES_HELP = LIST_TEMPLATES + HELP_SUFFIX;
    public static final String LIST_EXPERIMENTS_HELP = LIST_EXPERIMENTS + HELP_SUFFIX;
    public static final String RUN_EXPERIMENTS_HELP = RUN_EXPERIMENTS + HELP_SUFFIX;
    public static final String GET_RESULTS_HELP = GET_RESULTS + HELP_SUFFIX;
}
| 9,545 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/path/MainHelpPath.java | package org.apache.airavata.service.utils.path;
public class MainHelpPath {
public static final String SERVICE_PATH="/help/";
public static final String ENTRY="";
}
| 9,546 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/path/ApplicationPath.java | package org.apache.airavata.service.utils.path;
public class ApplicationPath {
public static final String SERVICE_PATH="/applications/";
public static final String ADD_APPLICATION="add";
public static final String ADD_APPLICATION_HELP="add/help";
}
| 9,547 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/json/ConversionUtils.java | package org.apache.airavata.service.utils.json;
import java.io.IOException;
import java.util.List;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
/**
 * JSON (de)serialization helpers built on Jackson's {@link ObjectMapper}.
 */
public class ConversionUtils {

    /**
     * Parses the JSON string against the candidate classes and returns the
     * first successful result, cast to {@code T}.
     *
     * @param jsonString       the JSON document to parse
     * @param referenceClasses candidate classes tried in order
     * @param targetType       the class the caller expects the result to be
     * @return the deserialized object typed as T
     * @throws Exception if no candidate class matches the JSON, or if the
     *                   deserialized object is not an instance of targetType
     */
    public static <T> T getJavaObject(String jsonString,
            List<Class<?>> referenceClasses, Class<?> targetType) throws Exception {
        Object obj = getJavaObject(jsonString, referenceClasses);
        // Explicit instance check instead of relying on a ClassCastException:
        // clearer, and avoids using exceptions for control flow.
        if (!targetType.isInstance(obj)) {
            throw new Exception("Object is not of the type " + targetType.getCanonicalName());
        }
        @SuppressWarnings("unchecked") // guarded by the isInstance check above
        T result = (T) obj;
        return result;
    }

    /**
     * Serializes the given object to its JSON representation.
     *
     * @param o the object to serialize
     * @return the JSON text
     */
    public static String getJSONString(Object o) throws JsonGenerationException, JsonMappingException, IOException {
        ObjectMapper mapper = new ObjectMapper();
        return mapper.writeValueAsString(o);
    }

    /**
     * Attempts to deserialize the JSON string as each candidate class in turn
     * and returns the first non-null successful result.
     *
     * @param jsonString       the JSON document to parse
     * @param referenceClasses candidate classes tried in order
     * @return the first successfully deserialized object
     * @throws Exception if no candidate class can be deserialized; the last
     *                   parse failure (if any) is attached as the cause so it
     *                   is no longer silently lost
     */
    public static Object getJavaObject(String jsonString,
            List<Class<?>> referenceClasses) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Exception lastFailure = null;
        for (Class<?> c : referenceClasses) {
            try {
                Object obj = mapper.readValue(jsonString, c);
                // readValue returns null for the JSON literal "null"; keep
                // trying (and ultimately fail) in that case, as before.
                if (obj != null) {
                    return obj;
                }
            } catch (IOException e) {
                // JsonParseException and JsonMappingException both extend
                // IOException, so one catch covers the three cases the
                // original handled. Remember the failure instead of printing
                // the stack trace and swallowing it.
                lastFailure = e;
            }
        }
        throw new Exception("Invalid JSON String", lastFailure);
    }
}
| 9,548 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/model/DataList.java | package org.apache.airavata.service.utils.model;
import java.util.List;
// Transport wrapper so a bare List can be serialized as a JSON object with a
// single "list" property (see ServiceUtils.WrapList).
public class DataList {
    // The wrapped values; element type intentionally unconstrained.
    private List<?> list;
    public List<?> getList() {
        return list;
    }
    public void setList(List<?> list) {
        this.list = list;
    }
}
| 9,549 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/model/ApplicationDescriptorJSONFacotry.java | package org.apache.airavata.service.utils.model;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.airavata.core.application.ApplicationParameter;
import org.apache.airavata.core.application.GramApplicationDescriptor;
import org.apache.airavata.core.application.LocalApplicationDescriptor;
import org.apache.airavata.core.application.ParameterType;
import org.apache.airavata.service.utils.json.ConversionUtils;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
public class ApplicationDescriptorJSONFacotry implements JSONObjectFactory {
private static ApplicationDescriptorJSONFacotry defaultInstance;
private static List<Class<?>> applicationClasses = new ArrayList<Class<?>>();
static{
applicationClasses.add(LocalApplicationDescriptor.class);
applicationClasses.add(GramApplicationDescriptor.class);
}
private ApplicationDescriptorJSONFacotry() {
}
public static ApplicationDescriptorJSONFacotry getInstance(){
if (defaultInstance==null){
defaultInstance=new ApplicationDescriptorJSONFacotry();
}
return defaultInstance;
}
@Override
public List<Class<?>> getTypes() {
return applicationClasses;
}
@Override
public String getJSONTypeTemplate(Class<?> cl) throws JsonGenerationException, JsonMappingException, IOException {
String result=null;
if (cl==LocalApplicationDescriptor.class){
LocalApplicationDescriptor app = new LocalApplicationDescriptor();
app.setApplicationName("{application.name}");
app.getInputs().add(new ApplicationParameter("{input.parameter.name}","{input.parameter.value}",ParameterType.STRING));
app.getOutputs().add(new ApplicationParameter("{output.parameter.name}","{output.parameter.value}",ParameterType.STRING));
app.setExecutablePath("{application.executable.location}");
app.setScratchLocation("{scratch.directory.location}");
String jsonString = ConversionUtils.getJSONString(app);
jsonString=jsonString.replaceAll("STRING", "{parameter.type}");
result=jsonString;
} else if (cl==GramApplicationDescriptor.class){
GramApplicationDescriptor app = new GramApplicationDescriptor();
app.setApplicationName("{application.name}");
app.getInputs().add(new ApplicationParameter("{input.parameter.name}","{input.parameter.value}",ParameterType.STRING));
app.getOutputs().add(new ApplicationParameter("{output.parameter.name}","{output.parameter.value}",ParameterType.STRING));
app.setExecutablePath("{application.executable.location}");
app.setScratchLocation("{scratch.directory.location}");
app.setGramHost("{gram.host.ip.location}");
app.setGridFTPEndpoint("{grid.ftp.url}");
String jsonString = ConversionUtils.getJSONString(app);
jsonString=jsonString.replaceAll("STRING", "{parameter.type}");
result=jsonString;
}
return result;
}
@Override
public String getTypeName(Class<?> cl) {
return cl.getSimpleName();
}
@Override
public String getTypeDescription(Class<?> cl) {
String result=null;
if (cl==LocalApplicationDescriptor.class){
result="Defines computational resource residing in the host which Airavata server is running";
}else if (cl==GramApplicationDescriptor.class){
result="Defines computational resource residing in a GRAM host";
}
return result;
}
}
| 9,550 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/model/JSONObjectFactory.java | package org.apache.airavata.service.utils.model;
import java.io.IOException;
import java.util.List;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
/**
 * Contract for factories that expose a set of descriptor classes together
 * with a JSON template and human-readable metadata for each class.
 */
public interface JSONObjectFactory {
    // The descriptor classes this factory knows how to describe.
    public List<Class<?>> getTypes();
    // Short display name for the given type.
    public String getTypeName(Class<?> cl);
    // One-line human-readable description of the given type.
    public String getTypeDescription(Class<?> cl);
    // JSON template with placeholder values for the given type.
    public String getJSONTypeTemplate(Class<?> cl) throws JsonGenerationException, JsonMappingException, IOException;
}
| 9,551 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/help/HelpData.java | package org.apache.airavata.service.utils.help;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Mutable container for the pieces of a help page: title, description,
 * syntax lines, parameter descriptions, examples and notes.
 *
 * The collection getters lazily create and return the LIVE backing
 * collections, so callers populate this object by mutating the returned
 * collections directly (as the service classes do).
 */
public class HelpData {

    private String title;
    private String description;
    private List<String> syntax;
    private Map<String, String> parameters;
    private List<String> examples;
    private List<String> notes;

    /**
     * @param title       short title of the documented operation
     *                    (parameter renamed from the original typo "tile")
     * @param description one-line usage description
     */
    public HelpData(String title, String description) {
        setTitle(title);
        setDescription(description);
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /** @return the live, lazily-created list of syntax lines */
    public List<String> getSyntax() {
        if (syntax == null) {
            syntax = new ArrayList<String>();
        }
        return syntax;
    }

    public void setSyntax(List<String> syntax) {
        this.syntax = syntax;
    }

    /** @return the live, lazily-created map of parameter name to description */
    public Map<String, String> getParameters() {
        if (parameters == null) {
            parameters = new HashMap<String, String>();
        }
        return parameters;
    }

    public void setParameters(Map<String, String> parameters) {
        this.parameters = parameters;
    }

    /** @return the live, lazily-created list of example invocations */
    public List<String> getExamples() {
        if (examples == null) {
            examples = new ArrayList<String>();
        }
        return examples;
    }

    public void setExamples(List<String> examples) {
        this.examples = examples;
    }

    /** @return the live, lazily-created list of free-form notes */
    public List<String> getNotes() {
        if (notes == null) {
            notes = new ArrayList<String>();
        }
        return notes;
    }

    public void setNotes(List<String> notes) {
        this.notes = notes;
    }
}
| 9,552 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/help/MethodUtils.java | package org.apache.airavata.service.utils.help;
/**
 * Renders a {@link HelpData} object as help text: HTML when the data is an
 * {@link HTMLHelpData}, plain text otherwise. Output is byte-identical to the
 * previous implementation; only the string building was switched from
 * repeated String concatenation in loops to StringBuilder.
 */
public class MethodUtils {

    /**
     * Renders the help data in the format implied by its concrete type.
     *
     * @param data the help content to render
     * @return the rendered help page
     */
    public static String getHelpString(HelpData data) {
        if (data instanceof HTMLHelpData) {
            return getHTMLHelp((HTMLHelpData) data);
        } else {
            return getPlainHelp(data);
        }
    }

    // Renders the help content as a minimal HTML page.
    private static String getHTMLHelp(HTMLHelpData data) {
        StringBuilder help = new StringBuilder("<html><body>");
        help.append("<h1>").append(data.getTitle()).append("</h1>").append("\n\n");
        help.append("<p>").append(data.getDescription()).append("</p>\n\n");
        help.append("<br />");
        if (data.getSyntax().size() > 0) {
            help.append("<p>").append("Syntax:\n<br />");
            for (String syntax : data.getSyntax()) {
                help.append("\t").append(syntax).append("\n");
            }
        }
        help.append("\n\n");
        if (data.getParameters().size() > 0) {
            help.append("<h2>Supported Parameters/Operations</h2>\n\n");
            help.append("<table>");
            for (String parameterName : data.getParameters().keySet()) {
                help.append("<tr>");
                help.append("<td><b>").append(parameterName).append("</b></td><td>")
                        .append(data.getParameters().get(parameterName)).append("</td>\n");
                help.append("</tr>");
            }
            help.append("</table>");
        }
        help.append("\n");
        if (data.getExamples().size() > 0) {
            help.append("<h2>Examples</h2>\n");
            for (String example : data.getExamples()) {
                help.append("\t<p>").append(example).append("</p>\n");
            }
            help.append("\n");
        }
        if (data.getNotes().size() > 0) {
            help.append("<h2>Notes</h2>\n");
            for (String note : data.getNotes()) {
                help.append("<p>").append(note).append("</p>\n");
            }
        }
        help.append("</body></html>");
        return help.toString();
    }

    // Renders the help content as tab-indented plain text.
    private static String getPlainHelp(HelpData data) {
        StringBuilder help = new StringBuilder(data.getTitle()).append("\n\n");
        help.append("Usage: ").append(data.getDescription()).append("\n\n");
        if (data.getSyntax().size() > 0) {
            help.append("Syntax:\n");
            for (String syntax : data.getSyntax()) {
                help.append("\t").append(syntax).append("\n");
            }
        }
        help.append("\n\n");
        // Header is emitted even when no parameters exist, matching the
        // original output exactly.
        help.append("Supported Parameters/Operations\n\n");
        for (String parameterName : data.getParameters().keySet()) {
            help.append(parameterName).append("\t\t")
                    .append(data.getParameters().get(parameterName)).append("\n");
        }
        help.append("\n");
        if (data.getExamples().size() > 0) {
            help.append("Examples:\n");
            for (String example : data.getExamples()) {
                help.append("\t").append(example).append("\n");
            }
            help.append("\n");
        }
        if (data.getNotes().size() > 0) {
            help.append("Notes:\n");
            for (String note : data.getNotes()) {
                help.append(note).append("\n");
            }
        }
        return help.toString();
    }
}
| 9,553 |
0 | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils | Create_ds/airavata-sandbox/api-mock/airavata-mock-service-utils/src/main/java/org/apache/airavata/service/utils/help/HTMLHelpData.java | package org.apache.airavata.service.utils.help;
/**
 * Marker subclass of {@link HelpData}: its presence tells
 * {@code MethodUtils.getHelpString} to render the help page as HTML rather
 * than plain text.
 */
public class HTMLHelpData extends HelpData {

    /**
     * @param title       short title of the documented operation
     *                    (parameter renamed from the original typo "tile")
     * @param description one-line usage description
     */
    public HTMLHelpData(String title, String description) {
        super(title, description);
    }
}
| 9,554 |
0 | Create_ds/sagemaker-xgboost-container/docker/1.7-1/resources | Create_ds/sagemaker-xgboost-container/docker/1.7-1/resources/mms/ExecutionParameters.java | package software.amazon.ai.mms.plugins.endpoint;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.SerializedName;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Properties;
import software.amazon.ai.mms.servingsdk.Context;
import software.amazon.ai.mms.servingsdk.ModelServerEndpoint;
import software.amazon.ai.mms.servingsdk.annotations.Endpoint;
import software.amazon.ai.mms.servingsdk.annotations.helpers.EndpointTypes;
import software.amazon.ai.mms.servingsdk.http.Request;
import software.amazon.ai.mms.servingsdk.http.Response;
/**
The modified endpoint source code for the jar used in this container.
To build this endpoint, first clone the MMS repo:
> git clone https://github.com/awslabs/mxnet-model-server.git
Copy this file into plugins/endpoints/src/main/java/software/amazon/ai/mms/plugins/endpoints/
and then, from the plugins directory, run:
> ./gradlew fJ
Edit the file in plugins/endpoint/resources/META-INF/services/* so that it lists this class.
Then build the JAR:
> ./gradlew build
The jar should be available in plugins/endpoints/build/libs as endpoints-1.0.jar
**/
@Endpoint(
urlPattern = "execution-parameters",
endpointType = EndpointTypes.INFERENCE,
description = "Execution parameters endpoint")
public class ExecutionParameters extends ModelServerEndpoint {

    /**
     * Answers SageMaker's GET /execution-parameters probe with the batch
     * transform settings this container supports, serialized as
     * pretty-printed JSON encoded in UTF-8.
     */
    @Override
    public void doGet(Request req, Response rsp, Context ctx) throws IOException {
        Properties config = ctx.getConfig();
        // Default request cap is 6 MiB (6 * 1024 * 1024 bytes).
        int maxRequestBytes = Integer.parseInt(config.getProperty("max_request_size", "6291456"));
        int workers = Integer.parseInt(config.getProperty("NUM_WORKERS", "1"));

        SagemakerXgboostResponse parameters = new SagemakerXgboostResponse();
        parameters.setMaxConcurrentTransforms(workers);
        parameters.setBatchStrategy("MULTI_RECORD");
        parameters.setMaxPayloadInMB(maxRequestBytes / (1024 * 1024));

        String json = new GsonBuilder().setPrettyPrinting().create().toJson(parameters);
        rsp.getOutputStream().write(json.getBytes(StandardCharsets.UTF_8));
    }

    /** JSON payload describing the supported batch transform parameters. */
    public static class SagemakerXgboostResponse {

        @SerializedName("MaxConcurrentTransforms")
        private int maxConcurrentTransforms;

        @SerializedName("BatchStrategy")
        private String batchStrategy;

        @SerializedName("MaxPayloadInMB")
        private int maxPayloadInMB;

        public SagemakerXgboostResponse() {
            // Defaults used when doGet does not overwrite the fields.
            maxConcurrentTransforms = 4;
            batchStrategy = "MULTI_RECORD";
            maxPayloadInMB = 6;
        }

        public int getMaxConcurrentTransforms() {
            return maxConcurrentTransforms;
        }

        public String getBatchStrategy() {
            return batchStrategy;
        }

        public int getMaxPayloadInMB() {
            return maxPayloadInMB;
        }

        public void setMaxConcurrentTransforms(int newMaxConcurrentTransforms) {
            maxConcurrentTransforms = newMaxConcurrentTransforms;
        }

        public void setBatchStrategy(String newBatchStrategy) {
            batchStrategy = newBatchStrategy;
        }

        public void setMaxPayloadInMB(int newMaxPayloadInMB) {
            maxPayloadInMB = newMaxPayloadInMB;
        }
    }
}
| 9,555 |
0 | Create_ds/flume/flume-ng-configfilters/flume-ng-environment-variable-config-filter/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-configfilters/flume-ng-environment-variable-config-filter/src/test/java/org/apache/flume/configfilter/TestEnvironmentVariableConfigFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.configfilter;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.EnvironmentVariables;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
// Unit tests for EnvironmentVariableConfigFilter. The system-rules
// EnvironmentVariables JUnit rule sets environment variables for the duration
// of each test and restores the original environment afterwards.
public class TestEnvironmentVariableConfigFilter {
    public static final String MY_PASSWORD_KEY = "my_password_key";
    public static final String MY_PASSWORD_KEY_2 = "my_password_key2";
    public static final String FILTERED = "filtered";
    public static final String FILTERED_2 = "filtered2";
    @Rule
    public final EnvironmentVariables environmentVariables
            = new EnvironmentVariables();
    // Keys that are present in the environment resolve to their values.
    @Test
    public void filter() {
        environmentVariables.set(MY_PASSWORD_KEY, FILTERED);
        environmentVariables.set(MY_PASSWORD_KEY_2, FILTERED_2);
        ConfigFilter configFilter = new EnvironmentVariableConfigFilter();
        assertEquals(FILTERED, configFilter.filter(MY_PASSWORD_KEY));
        assertEquals(FILTERED_2, configFilter.filter(MY_PASSWORD_KEY_2));
    }
    // Keys with no matching environment variable resolve to null.
    @Test
    public void filterUnknownKey() {
        ConfigFilter configFilter = new EnvironmentVariableConfigFilter();
        assertNull(configFilter.filter("unknown"));
    }
}
0 | Create_ds/flume/flume-ng-configfilters/flume-ng-environment-variable-config-filter/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-configfilters/flume-ng-environment-variable-config-filter/src/main/java/org/apache/flume/configfilter/EnvironmentVariableConfigFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.configfilter;
import java.util.Map;
/**
 * ConfigFilter that resolves configuration keys from environment variables.
 */
public class EnvironmentVariableConfigFilter extends AbstractConfigFilter {
    /**
     * Looks up the key as an environment variable name.
     *
     * @param key the environment variable name
     * @return the variable's value, or null if the variable is not set
     */
    @Override
    public String filter(String key) {
        return System.getenv(key);
    }
    /**
     * This filter needs no configuration, so the method is intentionally empty.
     */
    @Override
    public void initializeWithConfiguration(Map<String, String> configuration) {
        //NO-OP
    }
}
| 9,557 |
0 | Create_ds/flume/flume-ng-configfilters/flume-ng-external-process-config-filter/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-configfilters/flume-ng-external-process-config-filter/src/test/java/org/apache/flume/configfilter/TestExternalProcessConfigFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.configfilter;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
// Unit tests for ExternalProcessConfigFilter, driven by two shell scripts on
// the test classpath: test.sh (prints a value for each key, exits 0) and
// test_error.sh (exits non-zero).
public class TestExternalProcessConfigFilter {
    public static final String MY_PASSWORD_KEY = "my_password_key";
    public static final String MY_PASSWORD_KEY_2 = "my_password_key2";
    public static final String FILTERED = "filtered";
    public static final String FILTERED_2 = "filtered2";
    private ExternalProcessConfigFilter configFilter;
    @Before
    public void setUp() {
        configFilter = new ExternalProcessConfigFilter();
    }
    // Happy path: the external command exits 0 and its stdout line is
    // returned for each key.
    @Test
    public void filterOk() {
        String file = Thread.currentThread().getContextClassLoader()
                .getResource("test.sh").getFile();
        File testExecutable = new File(file);
        // The execute bit can be lost when the resource is copied into the
        // build directory, so set it explicitly before running the script.
        testExecutable.setExecutable(true);
        HashMap<String, String> configuration = new HashMap<>();
        configuration.put("command", file);
        configFilter.initializeWithConfiguration(configuration);
        assertEquals(FILTERED, configFilter.filter(MY_PASSWORD_KEY));
        assertEquals(FILTERED_2, configFilter.filter(MY_PASSWORD_KEY_2));
    }
    // Failure path: a non-zero exit status must not propagate an exception;
    // the filter logs the problem and returns null.
    @Test
    public void filterError() {
        String file = Thread.currentThread().getContextClassLoader()
                .getResource("test_error.sh").getFile();
        File testExecutable = new File(file);
        testExecutable.setExecutable(true);
        HashMap<String, String> configuration = new HashMap<>();
        configuration.put("command", file);
        configFilter.initializeWithConfiguration(configuration);
        assertNull(configFilter.filter(MY_PASSWORD_KEY));
    }
}
0 | Create_ds/flume/flume-ng-configfilters/flume-ng-external-process-config-filter/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-configfilters/flume-ng-external-process-config-filter/src/main/java/org/apache/flume/configfilter/ExternalProcessConfigFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.configfilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.charset.UnsupportedCharsetException;
import java.util.Arrays;
import java.util.Map;
import java.util.Scanner;
/**
 * ConfigFilter that resolves a configuration key by running a configured
 * external command with the key appended as its last argument and returning
 * the first line the process prints to stdout.
 */
public class ExternalProcessConfigFilter extends AbstractConfigFilter {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExternalProcessConfigFilter.class);

    private static final String COMMAND_KEY = "command";
    private static final String CHARSET_KEY = "charset";
    private static final String CHARSET_DEFAULT = "UTF-8";

    // Charset used to decode the process output. Kept package-private, as in
    // the original, in case same-package collaborators rely on it.
    Charset charset;
    private String command;

    /**
     * Resolves the key via the external command.
     *
     * @param key the configuration key to resolve
     * @return the first stdout line of the process, or null if the lookup
     *         failed (I/O error, non-zero exit status, or interruption)
     */
    @Override
    public String filter(String key) {
        try {
            return execCommand(key);
        } catch (InterruptedException ex) {
            // Restore the interrupt flag so callers further up the stack can
            // observe the interruption (it was previously swallowed).
            Thread.currentThread().interrupt();
            LOGGER.error("Error while reading value for key {}: ", key, ex);
        } catch (IllegalStateException | IOException ex) {
            LOGGER.error("Error while reading value for key {}: ", key, ex);
        }
        return null;
    }

    /**
     * Configures the filter.
     *
     * @param configuration must contain "command" (the executable plus
     *                      whitespace-separated arguments); may contain
     *                      "charset" (defaults to UTF-8)
     * @throws RuntimeException         if the configured charset is unsupported
     * @throws IllegalArgumentException if "command" is missing
     */
    @Override
    public void initializeWithConfiguration(Map<String, String> configuration) {
        String charsetName = configuration.getOrDefault(CHARSET_KEY, CHARSET_DEFAULT);
        try {
            charset = Charset.forName(charsetName);
        } catch (UnsupportedCharsetException ex) {
            throw new RuntimeException("Unsupported charset: " + charsetName, ex);
        }
        command = configuration.get(COMMAND_KEY);
        if (command == null) {
            throw new IllegalArgumentException(COMMAND_KEY + " must be set for " +
                    "ExternalProcessConfigFilter");
        }
    }

    // Runs the configured command with the key appended as the last argument
    // and returns the first line of its stdout.
    private String execCommand(String key) throws IOException, InterruptedException {
        String[] split = command.split("\\s+");
        int newLength = split.length + 1;
        String[] commandParts = Arrays.copyOf(split, newLength);
        commandParts[newLength - 1] = key;
        Process p = new ProcessBuilder(commandParts).start();
        // Drain stdout BEFORE waiting for the process to exit. The previous
        // implementation called waitFor() first, which can deadlock when the
        // child fills its stdout pipe buffer and blocks while nobody reads it
        // (see the java.lang.Process documentation on subprocess I/O buffers).
        String stdout = getResultFromStream(p.getInputStream());
        p.waitFor();
        if (p.exitValue() != 0) {
            String stderr;
            try {
                stderr = getResultFromStream(p.getErrorStream());
            } catch (Throwable t) {
                stderr = null;
            }
            throw new IllegalStateException(
                    String.format("Process (%s) exited with non-zero (%s) status code. Stderr: %s",
                            this.command, p.exitValue(), stderr));
        }
        return stdout;
    }

    // Reads the first line of the stream in the configured charset; warns
    // when the stream is empty or has extra lines.
    private String getResultFromStream(InputStream inputStream) {
        try (Scanner scanner = new Scanner(inputStream, charset.name())) {
            String result = null;
            if (scanner.hasNextLine()) {
                result = scanner.nextLine();
                if (scanner.hasNextLine()) {
                    LOGGER.warn("External process has more than one line of output. " +
                            "Only the first line is used.");
                }
            } else {
                LOGGER.warn("External process has not produced any output.");
            }
            return result;
        }
    }
}
| 9,559 |
0 | Create_ds/flume/flume-ng-configfilters/flume-ng-config-filter-api/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-configfilters/flume-ng-config-filter-api/src/main/java/org/apache/flume/configfilter/ConfigFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.configfilter;
import java.util.Map;
/**
* ConfigFilter is a tool for replacing sensitive or generated data in Flume configuration
*
*/
/**
 * ConfigFilter is a tool for replacing sensitive or generated data in the Flume
 * configuration. Implementations resolve a key (for example against an external
 * credential store) to the value that should be substituted into the
 * configuration.
 */
public interface ConfigFilter {

  /**
   * Filter method that returns the value associated with the given key.
   *
   * @param key the key to look up in the concrete implementations
   * @return the value represented by the key
   */
  String filter(String key);

  /**
   * Sets the component name. Required by the configuration management.
   *
   * @param name the component name assigned by the configuration framework
   */
  void setName(String name);

  /**
   * Returns the component name. Required by the configuration management.
   *
   * @return String the component name
   */
  String getName();

  /**
   * A method to configure the component.
   *
   * @param configuration The map of configuration options needed by concrete implementations.
   */
  void initializeWithConfiguration(Map<String, String> configuration);
}
| 9,560 |
0 | Create_ds/flume/flume-ng-configfilters/flume-ng-config-filter-api/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-configfilters/flume-ng-config-filter-api/src/main/java/org/apache/flume/configfilter/AbstractConfigFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.flume.configfilter;
/**
* A base implementation of the common methods for Configuration filters
*/
/**
 * Skeleton {@link ConfigFilter} implementation that stores the component name
 * on behalf of concrete filters.
 */
public abstract class AbstractConfigFilter implements ConfigFilter {

  // Component name, managed by Flume's configuration framework via setName().
  private String componentName;

  @Override
  public String getName() {
    return componentName;
  }

  @Override
  public void setName(String name) {
    this.componentName = name;
  }
}
| 9,561 |
0 | Create_ds/flume/flume-ng-configfilters/flume-ng-hadoop-credential-store-config-filter/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-configfilters/flume-ng-hadoop-credential-store-config-filter/src/test/java/org/apache/flume/configfilter/TestHadoopCredentialStoreConfigFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.flume.configfilter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialShell;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.EnvironmentVariables;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import static org.apache.flume.configfilter.HadoopCredentialStoreConfigFilter.CREDENTIAL_PROVIDER_PATH;
import static org.apache.flume.configfilter.HadoopCredentialStoreConfigFilter.HADOOP_SECURITY;
import static org.apache.flume.configfilter.HadoopCredentialStoreConfigFilter.PASSWORD_FILE_CONFIG_KEY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
public class TestHadoopCredentialStoreConfigFilter {
private static String providerPathDefault;
private static String providerPathEnv;
private static String providerPathPwdFile;
@ClassRule
public static final EnvironmentVariables environmentVariables
= new EnvironmentVariables();
private static File fileDefault;
private static File fileEnvPassword;
private static File fileFilePassword;
private HadoopCredentialStoreConfigFilter configFilter;
@BeforeClass
public static void setUpClass() throws Exception {
generateTempFileNames();
fillCredStoreWithDefaultPassword();
fillCredStoreWithPasswordFile();
fillCredStoreWithEnvironmentVariablePassword();
}
@AfterClass
public static void tearDown() {
fileDefault.deleteOnExit();
fileEnvPassword.deleteOnExit();
fileFilePassword.deleteOnExit();
}
@Before
public void setUp() {
String[] objects = System.getenv().keySet().toArray(new String[0]);
environmentVariables.clear(objects);
configFilter = new HadoopCredentialStoreConfigFilter();
}
@Test
public void filterDefaultPasswordFile() {
HashMap<String, String> configuration = new HashMap<>();
configuration.put(CREDENTIAL_PROVIDER_PATH, providerPathDefault);
configFilter.initializeWithConfiguration(configuration);
assertEquals("filtered_default", configFilter.filter("password"));
}
@Test
public void filterWithEnvPassword() {
environmentVariables.set("HADOOP_CREDSTORE_PASSWORD","envSecret");
HashMap<String, String> configuration = new HashMap<>();
configuration.put(CREDENTIAL_PROVIDER_PATH, providerPathEnv);
configFilter.initializeWithConfiguration(configuration);
assertEquals("filtered_env", configFilter.filter("password"));
}
@Test
public void filterWithPasswordFile() {
HashMap<String, String> configuration = new HashMap<>();
configuration.put(CREDENTIAL_PROVIDER_PATH, providerPathPwdFile);
configuration.put(PASSWORD_FILE_CONFIG_KEY, "test-password.txt");
configFilter.initializeWithConfiguration(configuration);
assertEquals("filtered_file", configFilter.filter("password"));
}
@Test
public void filterWithEnvNoPassword() {
HashMap<String, String> configuration = new HashMap<>();
configuration.put(CREDENTIAL_PROVIDER_PATH, providerPathEnv);
configFilter.initializeWithConfiguration(configuration);
assertNull(configFilter.filter("password"));
}
@Test
public void filterErrorWithPasswordFileWrongPassword() {
HashMap<String, String> configuration = new HashMap<>();
configuration.put(CREDENTIAL_PROVIDER_PATH, providerPathPwdFile);
configuration.put(PASSWORD_FILE_CONFIG_KEY, "test-password2.txt");
configFilter.initializeWithConfiguration(configuration);
assertNull(configFilter.filter("password"));
}
@Test
public void filterErrorWithPasswordFileNoPasswordFile() {
HashMap<String, String> configuration = new HashMap<>();
configuration.put(CREDENTIAL_PROVIDER_PATH, providerPathPwdFile);
configFilter.initializeWithConfiguration(configuration);
assertNull(configFilter.filter("password"));
}
@Test(expected = IllegalArgumentException.class)
public void filterErrorWithNoProvider() {
HashMap<String, String> configuration = new HashMap<>();
configFilter.initializeWithConfiguration(configuration);
}
private static void fillCredStoreWithEnvironmentVariablePassword() throws Exception {
environmentVariables.set("HADOOP_CREDSTORE_PASSWORD", "envSecret");
runCommand("create password -value filtered_env -provider "
+ providerPathEnv, new Configuration());
}
private static void fillCredStoreWithPasswordFile() throws Exception {
Configuration conf = new Configuration();
conf.set(
HADOOP_SECURITY + PASSWORD_FILE_CONFIG_KEY,
"test-password.txt"
);
runCommand("create password -value filtered_file -provider "
+ providerPathPwdFile, conf);
}
private static void fillCredStoreWithDefaultPassword() throws Exception {
runCommand("create password -value filtered_default -provider "
+ providerPathDefault, new Configuration());
}
private static void generateTempFileNames() throws IOException {
fileDefault = Files.createTempFile("test-default-pwd-", ".jceks").toFile();
boolean deleted = fileDefault.delete();
fileEnvPassword = Files.createTempFile("test-env-pwd-", ".jceks").toFile();
deleted &= fileEnvPassword.delete();
fileFilePassword = Files.createTempFile("test-file-pwd-", ".jceks").toFile();
deleted &= fileFilePassword.delete();
if (!deleted) {
fail("Could not delete temporary files");
}
providerPathDefault = "jceks://file/" + fileDefault.getAbsolutePath();
providerPathEnv = "jceks://file/" + fileEnvPassword.getAbsolutePath();
providerPathPwdFile = "jceks://file/" + fileFilePassword.getAbsolutePath();
}
private static void runCommand(String c, Configuration conf) throws Exception {
ToolRunner.run(conf, new CredentialShell(), c.split(" "));
}
} | 9,562 |
0 | Create_ds/flume/flume-ng-configfilters/flume-ng-hadoop-credential-store-config-filter/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-configfilters/flume-ng-hadoop-credential-store-config-filter/src/main/java/org/apache/flume/configfilter/HadoopCredentialStoreConfigFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.flume.configfilter;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A config filter that resolves keys against a Hadoop credential store, as
 * configured through {@code credential.provider.path} and, optionally, a java
 * keystore provider password file located on the classpath.
 */
public class HadoopCredentialStoreConfigFilter extends AbstractConfigFilter {

  private static final Logger LOGGER = LoggerFactory.getLogger(
      HadoopCredentialStoreConfigFilter.class);

  static final String PASSWORD_FILE_CONFIG_KEY
      = "credstore.java-keystore-provider.password-file";
  static final String CREDENTIAL_PROVIDER_PATH
      = "credential.provider.path";
  static final String HADOOP_SECURITY = "hadoop.security.";

  // Hadoop configuration carrying the provider path and password-file setting.
  private Configuration hadoopConfiguration;

  public void initializeWithConfiguration(Map<String, String> configuration) {
    LOGGER.debug("Initializing hadoop credential store.");
    hadoopConfiguration = new Configuration();
    // Throws IllegalArgumentException when the provider path is absent.
    hadoopConfiguration.set(
        HADOOP_SECURITY + CREDENTIAL_PROVIDER_PATH,
        configuration.get(CREDENTIAL_PROVIDER_PATH)
    );

    String passwordFile = configuration.get(PASSWORD_FILE_CONFIG_KEY);
    if (passwordFile == null || passwordFile.isEmpty()) {
      return;
    }
    checkPasswordFile(passwordFile);
    hadoopConfiguration.set(HADOOP_SECURITY + PASSWORD_FILE_CONFIG_KEY, passwordFile);
  }

  /** Logs an error when the configured password file is not on the classpath. */
  private void checkPasswordFile(String passwordFile) {
    ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
    if (contextLoader.getResource(passwordFile) == null) {
      LOGGER.error("The java keystore provider password file has to be on the classpath." +
          " The password file provided in the configuration cannot be found and will not be used"
      );
    }
  }

  @Override
  public String filter(String key) {
    try {
      char[] secret = hadoopConfiguration.getPassword(key);
      return secret == null ? null : String.valueOf(secret);
    } catch (IOException e) {
      LOGGER.error("Error while reading value for key {}: ", key, e);
      return null;
    }
  }
}
| 9,563 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/source/TestNetcatSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import com.google.common.collect.Lists;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.EventDrivenSource;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.lifecycle.LifecycleException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Writer;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.nio.channels.Channels;
import java.nio.channels.SocketChannel;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
// Parameterized over the two ack modes of the netcat source (ack / no-ack).
@RunWith(value = Parameterized.class)
public class TestNetcatSource {

  private Channel channel;
  private EventDrivenSource source;
  // Whether the source should answer "OK" for each received event;
  // supplied per run by the parameterized runner.
  private boolean ackEveryEvent;

  private static final Logger logger =
      LoggerFactory.getLogger(TestNetcatSource.class);

  public TestNetcatSource(boolean ackForEveryEvent) {
    ackEveryEvent = ackForEveryEvent;
  }

  /** Runs the whole suite twice: once with acks enabled, once without. */
  @Parameters
  public static Collection<?> data() {
    Object[][] data = new Object[][] { { true }, { false } };
    return Arrays.asList(data);
  }

  /**
   * Picks a currently free TCP port by binding an ephemeral server socket and
   * releasing it. NOTE(review): there is an inherent race — the port could be
   * taken again before the source binds it.
   */
  private static int getFreePort() {
    try (ServerSocket socket = new ServerSocket(0)) {
      return socket.getLocalPort();
    } catch (IOException e) {
      throw new AssertionError("Can not open socket", e);
    }
  }

  /** Wires a fresh netcat source to a memory channel via a replicating selector. */
  @Before
  public void setUp() {
    logger.info("Running setup");

    channel = new MemoryChannel();
    source = new NetcatSource();

    Context context = new Context();

    Configurables.configure(channel, context);

    List<Channel> channels = Lists.newArrayList(channel);

    ChannelSelector rcs = new ReplicatingChannelSelector();
    rcs.setChannels(channels);

    source.setChannelProcessor(new ChannelProcessor(rcs));
  }

  /**
   * Starts the source, fires 100 concurrent client writes at it, and verifies
   * each event body arrives on the channel (and that the ack behavior matches
   * the configured mode).
   */
  @Test
  public void testLifecycle() throws InterruptedException, LifecycleException,
      EventDeliveryException {
    final int port = getFreePort();
    ExecutorService executor = Executors.newFixedThreadPool(3);
    Context context = new Context();
    context.put("bind", "0.0.0.0");
    context.put("port", String.valueOf(port));
    context.put("ack-every-event", String.valueOf(ackEveryEvent));

    Configurables.configure(source, context);

    source.start();

    // One client request: connect, send a single line, optionally read the ack.
    Runnable clientRequestRunnable = new Runnable() {
      @Override
      public void run() {
        try {
          SocketChannel clientChannel = SocketChannel
              .open(new InetSocketAddress(port));

          Writer writer = Channels.newWriter(clientChannel, "utf-8");

          BufferedReader reader = new BufferedReader(
              Channels.newReader(clientChannel, "utf-8"));

          writer.write("Test message\n");
          writer.flush();

          if (ackEveryEvent) {
            String response = reader.readLine();
            Assert.assertEquals("Server should return OK", "OK", response);
          } else {
            // In no-ack mode the server must not have written anything back.
            Assert.assertFalse("Server should not return anything", reader.ready());
          }

          clientChannel.close();
        } catch (IOException e) {
          logger.error("Caught exception: ", e);
        }
      }
    };

    // channel.take() must run inside a transaction on the same channel.
    ChannelSelector selector = source.getChannelProcessor().getSelector();
    Transaction tx = selector.getAllChannels().get(0).getTransaction();
    tx.begin();
    for (int i = 0; i < 100; i++) {
      logger.info("Sending request");
      executor.submit(clientRequestRunnable);

      // take() blocks until the submitted client's event has been published.
      Event event = channel.take();
      Assert.assertNotNull(event);
      Assert.assertArrayEquals("Test message".getBytes(), event.getBody());
    }
    tx.commit();
    tx.close();

    // Drain outstanding client tasks before stopping the source.
    executor.shutdown();
    while (!executor.isTerminated()) {
      executor.awaitTermination(500, TimeUnit.MILLISECONDS);
    }

    source.stop();
  }
}
| 9,564 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/source/EventProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import org.apache.flume.Event;
/**
* Interface indicating processEvent is implemented.
*/
/**
 * Interface indicating processEvent is implemented.
 */
public interface EventProcessor {

  /**
   * When implemented causes the event to be handled by the component.
   * The default implementation is a no-op, so implementors only need to
   * override this when they actually consume events.
   *
   * @param event The Flume event.
   */
  default void processEvent(Event event) {
  }
}
| 9,565 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/source/LocalSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDrivenSource;
import org.apache.flume.conf.Configurable;
import org.apache.flume.instrumentation.SourceCounter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Accepts an event from a local component and publishes it to a channel.
*/
/**
 * Accepts an event from a local component and publishes it to a channel.
 */
public class LocalSource extends AbstractSource
    implements Configurable, EventDrivenSource, EventProcessor {

  private static final Logger logger = LoggerFactory.getLogger(LocalSource.class);

  // Tracks received/accepted event metrics; created lazily in configure().
  private SourceCounter sourceCounter;

  /**
   * Called when flume starts up.
   *
   * @param context Config values for this source from the flume properties file.
   */
  @Override
  public void configure(Context context) {
    if (sourceCounter == null) {
      sourceCounter = new SourceCounter(getName());
    }
  }

  /** Called by flume to start this source. */
  @Override
  public void start() {
    logger.info("Local source {} starting.", getName());
    sourceCounter.start();
    super.start();
  }

  /** Called by flume to stop this source. */
  @Override
  public void stop() {
    logger.info("Local source {} stopping.", getName());
    sourceCounter.stop();
    super.stop();
    logger.info("Local source {} stopped. Metrics: {}", getName(), sourceCounter);
  }

  /**
   * Forwards a locally produced event to the configured channel(s).
   *
   * @param event the message to pass on; null events are silently dropped
   *              (and not counted as received)
   */
  @Override
  public void processEvent(Event event) {
    if (event == null) {
      return;
    }
    sourceCounter.incrementAppendReceivedCount();
    sourceCounter.incrementEventReceivedCount();
    logger.debug("pushing event to channel");
    getChannelProcessor().processEvent(event);
    sourceCounter.incrementAppendAcceptedCount();
    sourceCounter.incrementEventAcceptedCount();
  }
}
| 9,566 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/sink/NullInitSink.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.sink;
import java.util.Map;
import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.CounterGroup;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Sink;
import org.apache.flume.Source;
import org.apache.flume.SourceRunner;
import org.apache.flume.Transaction;
import org.apache.flume.node.Initializable;
import org.apache.flume.node.MaterializedConfiguration;
import org.apache.flume.source.EventProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* A {@link Sink} implementation that simply discards all events it receives. A
* <tt>/dev/null</tt> for Flume.
* </p>
* <p>
* <b>Configuration options</b>
* </p>
* <p>
* <i>This sink has no configuration parameters.</i>
* </p>
* <p>
* <b>Metrics</b>
* </p>
* <p>
* TODO
* </p>
*/
/**
 * <p>
 * A {@link Sink} implementation that discards all events it receives after
 * re-publishing them to a target {@link Source} (named by the
 * {@code targetSource} configuration property) that implements
 * {@link EventProcessor}. A <tt>/dev/null</tt> for Flume, with a feedback loop.
 * </p>
 * <p>
 * Each event body is expected to be a decimal number; a running sum is kept in
 * {@code total} and attached to every republished event as the "Total" header.
 * </p>
 */
public class NullInitSink extends NullSink implements Initializable {

  private static final Logger logger = LoggerFactory.getLogger(NullInitSink.class);

  // Name of the source to republish events to, read from "targetSource".
  private String sourceName = null;
  // Resolved in initialize(); stays null when no matching source exists.
  private EventProcessor eventProcessor = null;
  // Running sum of the numeric event bodies, exposed via the "Total" header.
  private long total = 0;

  public NullInitSink() {
    super();
  }

  @Override
  public void configure(Context context) {
    sourceName = context.getString("targetSource");
    super.configure(context);
  }

  /**
   * Locates the source named {@code targetSource} among the configured source
   * runners and keeps a reference to it when it implements EventProcessor.
   */
  @Override
  public void initialize(MaterializedConfiguration configuration) {
    logger.debug("Locating source for event publishing");
    for (Map.Entry<String, SourceRunner> entry : configuration.getSourceRunners().entrySet()) {
      if (entry.getKey().equals(sourceName)) {
        Source source = entry.getValue().getSource();
        if (source instanceof EventProcessor) {
          eventProcessor = (EventProcessor) source;
          logger.debug("Found event processor {}", source.getName());
          return;
        }
      }
    }
    logger.warn("No Source named {} found for republishing events.", sourceName);
  }

  /**
   * Drains up to batchSize events from the channel, republishing each one to
   * the target source (when resolved) before discarding it.
   *
   * @return READY when a full batch was taken, BACKOFF when the channel ran dry
   * @throws EventDeliveryException when the transaction fails
   */
  @Override
  public Status process() throws EventDeliveryException {
    Status status = Status.READY;
    Channel channel = getChannel();
    Transaction transaction = channel.getTransaction();
    Event event = null;
    CounterGroup counterGroup = getCounterGroup();
    long batchSize = getBatchSize();
    try {
      transaction.begin();
      int i = 0;
      for (i = 0; i < batchSize; i++) {
        event = channel.take();
        if (event == null) {
          status = Status.BACKOFF;
          break;
        }
        // Body is assumed to be a decimal event id — TODO confirm with producer.
        long id = Long.parseLong(new String(event.getBody()));
        total += id;
        event.getHeaders().put("Total", Long.toString(total));
        // Guard against a missing target source: initialize() only logs a
        // warning when no matching EventProcessor is found, which previously
        // caused a NullPointerException here.
        if (eventProcessor != null) {
          eventProcessor.processEvent(event);
        }
        logger.info("Null sink {} successful processed event {}", getName(), id);
      }
      transaction.commit();
      counterGroup.addAndGet("events.success", (long) Math.min(batchSize, i));
      counterGroup.incrementAndGet("transaction.success");
    } catch (Exception ex) {
      transaction.rollback();
      counterGroup.incrementAndGet("transaction.failed");
      logger.error("Failed to deliver event. Exception follows.", ex);
      throw new EventDeliveryException("Failed to deliver event: " + event, ex);
    } finally {
      transaction.close();
    }
    return status;
  }
}
| 9,567 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestPollingZooKeeperConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import junit.framework.Assert;
import org.apache.flume.conf.FlumeConfiguration;
import org.apache.flume.conf.FlumeConfiguration.AgentConfiguration;
import org.apache.flume.lifecycle.LifecycleController;
import org.apache.flume.lifecycle.LifecycleState;
import org.junit.Test;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
/**
 * Verifies that PollingZooKeeperConfigurationProvider publishes a new
 * MaterializedConfiguration on the event bus when the ZooKeeper node changes.
 */
public class TestPollingZooKeeperConfigurationProvider extends
    TestAbstractZooKeeperConfigurationProvider {

  private EventBus eb;
  private EventSync es;
  private PollingZooKeeperConfigurationProvider cp;

  /**
   * Simple monitor that lets the test thread block until the provider has
   * posted a configuration event on the bus.
   */
  private class EventSync {

    private boolean notified;

    // Invoked by the EventBus when the provider publishes a configuration.
    @Subscribe
    public synchronized void notifyEvent(MaterializedConfiguration mConfig) {
      notified = true;
      notifyAll();
    }

    // Blocks until notifyEvent has fired; loop guards against spurious wakeups.
    public synchronized void awaitEvent() throws InterruptedException {
      while (!notified) {
        wait();
      }
    }

    public synchronized void reset() {
      notified = false;
    }
  }

  @Override
  protected void doSetUp() throws Exception {
    eb = new EventBus("test");
    es = new EventSync();
    es.reset();
    eb.register(es);
    cp = new PollingZooKeeperConfigurationProvider(AGENT_NAME, "localhost:"
        + zkServer.getPort(), null, eb);
    cp.start();
    LifecycleController.waitForOneOf(cp, LifecycleState.START_OR_ERROR);
  }

  @Override
  protected void doTearDown() throws Exception {
    // do nothing
  }

  @Test
  public void testPolling() throws Exception {
    // First event fires on startup, before any configuration data exists.
    es.awaitEvent();
    es.reset();

    FlumeConfiguration fc = cp.getFlumeConfiguration();
    Assert.assertTrue(fc.getConfigurationErrors().isEmpty());
    AgentConfiguration ac = fc.getConfigurationFor(AGENT_NAME);
    Assert.assertNull(ac);

    // Writing data to ZooKeeper must trigger a second event with the config.
    addData();
    es.awaitEvent();
    es.reset();

    verifyProperties(cp);
  }
}
| 9,568 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestClasspathConfigurationSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.flume.node;
import java.net.URI;
import java.util.Properties;
import org.junit.Assert;
import org.junit.Test;
/**
* Tests that files can be loaded from the Classpath.
*/
/**
 * Tests that configuration files can be loaded from the classpath, using all
 * the accepted spellings of the {@code classpath:} URI scheme.
 */
public class TestClasspathConfigurationSource {

  /**
   * Loads flume-conf.properties through a ClasspathConfigurationSource built
   * from the given URI string and verifies that a known key is present.
   */
  private void assertConfigurationLoads(String uriString) throws Exception {
    URI confFile = new URI(uriString);
    ConfigurationSource source = new ClasspathConfigurationSource(confFile);
    Assert.assertNotNull("No configuration returned", source);
    Properties props = new Properties();
    props.load(source.getInputStream());
    String value = props.getProperty("host1.sources");
    Assert.assertNotNull("Missing key", value);
  }

  @Test
  public void testClasspath() throws Exception {
    assertConfigurationLoads("classpath:///flume-conf.properties");
  }

  @Test
  public void testOddClasspath() throws Exception {
    assertConfigurationLoads("classpath:/flume-conf.properties");
  }

  @Test
  public void testImproperClasspath() throws Exception {
    assertConfigurationLoads("classpath://flume-conf.properties");
  }

  @Test
  public void testShorthandClasspath() throws Exception {
    assertConfigurationLoads("classpath:flume-conf.properties");
  }
}
| 9,569 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestEnvVarResolverProperties.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.EnvironmentVariables;
import java.io.File;
/**
 * Tests environment-variable substitution performed by
 * EnvVarResolverProperties, both directly and through a
 * PropertiesFileConfigurationProvider.
 */
public class TestEnvVarResolverProperties {

  // Configuration file containing ${...} placeholders to be resolved.
  private static final File TESTFILE = new File(
      TestEnvVarResolverProperties.class.getClassLoader()
          .getResource("flume-conf-with-envvars.properties").getFile());

  @Rule
  public final EnvironmentVariables environmentVariables = new EnvironmentVariables();

  private PropertiesFileConfigurationProvider provider;

  @Before
  public void setUp() throws Exception {
    provider = new PropertiesFileConfigurationProvider("a1", TESTFILE);
  }

  @Test
  public void resolveEnvVar() throws Exception {
    environmentVariables.set("VARNAME", "varvalue");
    String resolved = EnvVarResolverProperties.resolveEnvVars("padding ${VARNAME} padding");
    Assert.assertEquals("padding varvalue padding", resolved);
  }

  @Test
  public void resolveEnvVars() throws Exception {
    // Two placeholders in one string must both be substituted.
    environmentVariables.set("VARNAME1", "varvalue1");
    environmentVariables.set("VARNAME2", "varvalue2");
    String resolved = EnvVarResolverProperties
        .resolveEnvVars("padding ${VARNAME1} ${VARNAME2} padding");
    Assert.assertEquals("padding varvalue1 varvalue2 padding", resolved);
  }

  @Test
  public void getProperty() throws Exception {
    String expectedPort = "6667";
    environmentVariables.set("NC_PORT", expectedPort);
    // Select the env-var-resolving Properties implementation for the provider.
    System.setProperty("propertiesImplementation",
        "org.apache.flume.node.EnvVarResolverProperties");

    String actualPort = provider.getFlumeConfiguration()
        .getConfigurationFor("a1")
        .getSourceContext().get("r1").getParameters().get("port");
    Assert.assertEquals(expectedPort, actualPort);
  }
}
| 9,570 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestPollingPropertiesFileConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.File;
import java.util.List;
import junit.framework.Assert;
import org.apache.commons.io.FileUtils;
import org.apache.flume.lifecycle.LifecycleController;
import org.apache.flume.lifecycle.LifecycleState;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Lists;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import com.google.common.io.Files;
/**
 * Verifies that a polling {@link UriConfigurationProvider} re-reads its
 * configuration file when the file's modification time changes and republishes
 * the re-materialized configuration on the shared {@link EventBus}.
 */
public class TestPollingPropertiesFileConfigurationProvider {
  // Resource copied into a scratch directory so the test may modify it.
  private static final File TESTFILE = new File(
      TestPollingPropertiesFileConfigurationProvider.class.getClassLoader()
          .getResource("flume-conf.properties").getFile());
  private UriConfigurationProvider provider;
  private File baseDir;
  private File configFile;
  private EventBus eventBus;

  @Before
  public void setUp() throws Exception {
    baseDir = Files.createTempDir();
    configFile = new File(baseDir, TESTFILE.getName());
    Files.copy(TESTFILE, configFile);
    eventBus = new EventBus("test");
    ConfigurationSource source = new FileConfigurationSource(configFile.toURI());
    // Poll interval of 1 second.
    provider = new UriConfigurationProvider("host1", Lists.newArrayList(source), null,
        eventBus, 1);
    provider.start();
    LifecycleController.waitForOneOf(provider, LifecycleState.START_OR_ERROR);
  }

  @After
  public void tearDown() throws Exception {
    // Stop the provider first so its polling thread is no longer reading the
    // config file, then remove the scratch directory. The original order
    // (delete, then stop) could race an in-flight poll against the deletion.
    provider.stop();
    FileUtils.deleteDirectory(baseDir);
  }

  @Test
  public void testPolling() throws Exception {
    // let first event fire
    Thread.sleep(2000L);
    final List<MaterializedConfiguration> events = Lists.newArrayList();
    Object eventHandler = new Object() {
      @Subscribe
      public synchronized void handleConfigurationEvent(MaterializedConfiguration event) {
        events.add(event);
      }
    };
    eventBus.register(eventHandler);
    // Touch the file; the provider polls every second and should republish.
    configFile.setLastModified(System.currentTimeMillis());
    // now wait for second event to fire
    Thread.sleep(2000L);
    Assert.assertEquals(String.valueOf(events), 1, events.size());
    MaterializedConfiguration materializedConfiguration = events.remove(0);
    Assert.assertEquals(1, materializedConfiguration.getSourceRunners().size());
    Assert.assertEquals(1, materializedConfiguration.getSinkRunners().size());
    Assert.assertEquals(1, materializedConfiguration.getChannels().size());
  }
}
| 9,571 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestApplication.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.node;
import static org.mockito.Mockito.*;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.io.FileUtils;
import org.apache.flume.Channel;
import org.apache.flume.Event;
import org.apache.flume.SinkRunner;
import org.apache.flume.SourceRunner;
import org.apache.flume.Transaction;
import org.apache.flume.event.SimpleEvent;
import org.apache.flume.lifecycle.LifecycleAware;
import org.apache.flume.lifecycle.LifecycleState;
import org.apache.flume.source.EventProcessor;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import com.google.common.collect.Lists;
import com.google.common.eventbus.EventBus;
import com.google.common.io.Files;
/**
 * Tests the {@link Application} lifecycle driver: starting/stopping mocked
 * components delivered via the Guava {@link EventBus}, plus regression tests
 * for FLUME-1854 and FLUME-2786 and an end-to-end configuration smoke test.
 */
public class TestApplication {
  // Scratch directory for per-test copies of config resources.
  private File baseDir;
  @Before
  public void setup() throws Exception {
    baseDir = Files.createTempDir();
  }
  @After
  public void tearDown() throws Exception {
    FileUtils.deleteDirectory(baseDir);
  }
  /**
   * Builds a Mockito mock of the given {@link LifecycleAware} type whose
   * start()/stop() calls flip an internal state flag, so getLifecycleState()
   * reflects the last lifecycle call (IDLE until first start).
   */
  private <T extends LifecycleAware> T mockLifeCycle(Class<T> klass) {
    T lifeCycleAware = mock(klass);
    // Shared mutable state observed by the stubbed getLifecycleState().
    final AtomicReference<LifecycleState> state =
        new AtomicReference<LifecycleState>();
    state.set(LifecycleState.IDLE);
    when(lifeCycleAware.getLifecycleState()).then(new Answer<LifecycleState>() {
      @Override
      public LifecycleState answer(InvocationOnMock invocation)
          throws Throwable {
        return state.get();
      }
    });
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        state.set(LifecycleState.START);
        return null;
      }
    }).when(lifeCycleAware).start();
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        state.set(LifecycleState.STOP);
        return null;
      }
    }).when(lifeCycleAware).stop();
    return lifeCycleAware;
  }
  /**
   * Posting a MaterializedConfiguration on the bus and starting the
   * Application must start (and later stop) each source, sink and channel.
   */
  @Test
  public void testBasicConfiguration() throws Exception {
    EventBus eventBus = new EventBus("test-event-bus");
    MaterializedConfiguration materializedConfiguration = new
        SimpleMaterializedConfiguration();
    SourceRunner sourceRunner = mockLifeCycle(SourceRunner.class);
    materializedConfiguration.addSourceRunner("test", sourceRunner);
    SinkRunner sinkRunner = mockLifeCycle(SinkRunner.class);
    materializedConfiguration.addSinkRunner("test", sinkRunner);
    Channel channel = mockLifeCycle(Channel.class);
    materializedConfiguration.addChannel("test", channel);
    ConfigurationProvider configurationProvider = mock(ConfigurationProvider.class);
    when(configurationProvider.getConfiguration()).thenReturn(materializedConfiguration);
    Application application = new Application();
    eventBus.register(application);
    // Deliver the configuration to the application via the bus.
    eventBus.post(materializedConfiguration);
    application.start();
    // Give the supervisor time to start the components before verifying.
    Thread.sleep(1000L);
    verify(sourceRunner).start();
    verify(sinkRunner).start();
    verify(channel).start();
    application.stop();
    Thread.sleep(1000L);
    verify(sourceRunner).stop();
    verify(sinkRunner).stop();
    verify(channel).stop();
  }
  /**
   * FLUME-1854 regression: repeated start/stop cycles of an Application with a
   * polling provider, interrupted at random points, must not wedge or throw.
   */
  @Test
  public void testFLUME1854() throws Exception {
    File configFile = new File(baseDir, "flume-conf.properties");
    Files.copy(new File(getClass().getClassLoader()
        .getResource("flume-conf.properties").getFile()), configFile);
    Random random = new Random();
    for (int i = 0; i < 3; i++) {
      EventBus eventBus = new EventBus("test-event-bus");
      PollingPropertiesFileConfigurationProvider configurationProvider =
          new PollingPropertiesFileConfigurationProvider("host1",
              configFile, eventBus, 1);
      List<LifecycleAware> components = Lists.newArrayList();
      components.add(configurationProvider);
      Application application = new Application(components);
      eventBus.register(application);
      application.start();
      // Random dwell time so the stop lands at varying lifecycle phases.
      Thread.sleep(random.nextInt(10000));
      application.stop();
    }
  }
  /**
   * FLUME-2786 regression: stop() must complete within the test timeout even
   * when the config file's lastModified() and the provider's stop() are each
   * artificially delayed by a full poll interval.
   */
  @Test(timeout = 10000L)
  public void testFLUME2786() throws Exception {
    final String agentName = "test";
    final int interval = 1;
    final long intervalMs = 1000L;
    File configFile = new File(baseDir, "flume-conf.properties");
    Files.copy(new File(getClass().getClassLoader()
        .getResource("flume-conf.properties.2786").getFile()), configFile);
    // Spy that makes every lastModified() call slow and always "newer".
    File mockConfigFile = spy(configFile);
    when(mockConfigFile.lastModified()).then(new Answer<Long>() {
      @Override
      public Long answer(InvocationOnMock invocation) throws Throwable {
        Thread.sleep(intervalMs);
        return System.currentTimeMillis();
      }
    });
    EventBus eventBus = new EventBus(agentName + "-event-bus");
    PollingPropertiesFileConfigurationProvider configurationProvider =
        new PollingPropertiesFileConfigurationProvider(agentName,
            mockConfigFile, eventBus, interval);
    PollingPropertiesFileConfigurationProvider mockConfigurationProvider =
        spy(configurationProvider);
    // Delay stop() by one interval before delegating to the real method.
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        Thread.sleep(intervalMs);
        invocation.callRealMethod();
        return null;
      }
    }).when(mockConfigurationProvider).stop();
    List<LifecycleAware> components = Lists.newArrayList();
    components.add(mockConfigurationProvider);
    Application application = new Application(components);
    eventBus.register(application);
    application.start();
    Thread.sleep(1500L);
    application.stop();
  }
  /**
   * End-to-end smoke test: materializes a configuration from
   * flume-conf-init.properties, pushes five numbered events through the
   * "source1" EventProcessor, and checks each event arriving on
   * "processedChannel" carries a running "Total" header (1, 3, 6, 10, 15 —
   * the running sums of 1..5).
   */
  @Test
  public void testFlumeInit() throws Exception {
    File configFile = new File(baseDir, "flume-conf-init.properties");
    Files.copy(new File(getClass().getClassLoader()
        .getResource("flume-conf-init.properties").getFile()), configFile);
    ConfigurationSource source = new FileConfigurationSource(configFile.toURI());
    List<ConfigurationSource> sourceList = new ArrayList<>();
    sourceList.add(source);
    UriConfigurationProvider configurationProvider =
        new UriConfigurationProvider("host1", sourceList, null, null, 1);
    List<LifecycleAware> components = Lists.newArrayList();
    Application application = new Application(components);
    MaterializedConfiguration configuration = configurationProvider.getConfiguration();
    Assert.assertNotNull("Unable to create configuration", configuration);
    application.handleConfigurationEvent(configuration);
    application.start();
    Map<String, Channel> channels = configuration.getChannels();
    Channel channel = channels.get("processedChannel");
    Assert.assertNotNull("Channel not found", channel);
    Map<String, SourceRunner> sourceRunners = configuration.getSourceRunners();
    Assert.assertNotNull("No source runners", sourceRunners);
    SourceRunner runner = sourceRunners.get("source1");
    Assert.assertNotNull("No source runner", runner);
    EventProcessor processor = (EventProcessor) runner.getSource();
    // Expected running totals of bodies "1".."5".
    long[] expected = new long[]{1, 3, 6, 10, 15};
    for (int i = 0; i < 5; ++i) {
      Event event = new SimpleEvent();
      event.setBody(Long.toString(i + 1).getBytes(StandardCharsets.UTF_8));
      processor.processEvent(event);
    }
    // Allow events to propagate to the channel before draining it.
    Thread.sleep(500);
    for (int i = 0; i < 5; ++i) {
      Event event = getEvent(channel);
      Assert.assertNotNull("No event returned on iteration " + i, event);
      String val = event.getHeaders().get("Total");
      Assert.assertNotNull("No Total in event " + i, val);
      long total = Long.parseLong(val);
      Assert.assertEquals(expected[i], total);
    }
    application.stop();
  }
  /**
   * Takes a single event from the channel inside a transaction, rolling back
   * and failing the test on any error. Returns the event (may be null if the
   * channel is empty).
   */
  private Event getEvent(Channel channel) {
    Transaction transaction = channel.getTransaction();
    Event event = null;
    try {
      transaction.begin();
      event = channel.take();
      transaction.commit();
    } catch (Exception ex) {
      transaction.rollback();
      Assert.fail("Failed to retrieve Flume Event: " + ex.getMessage());
    } finally {
      transaction.close();
    }
    return event;
  }
}
| 9,572 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestOverrideFile.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.File;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.EnvironmentVariables;
import com.google.common.collect.Lists;
import junit.framework.Assert;
/**
 * Verifies override semantics when several configuration sources are supplied:
 * a value defined in the second (override) file wins over the base file for
 * the same key.
 */
public class TestOverrideFile {
  private static final File TESTFILE = new File(
      TestOverrideFile.class.getClassLoader()
          .getResource("flume-conf-with-recursiveLookup.properties").getFile());
  private static final File OVERRIDEFILE = new File(
      TestOverrideFile.class.getClassLoader()
          .getResource("flume-conf-override.properties").getFile());
  // Bind address expected from the override file.
  private static final String BIND = "192.168.13.101";
  @Rule
  public final EnvironmentVariables environmentVariables = new EnvironmentVariables();
  private UriConfigurationProvider provider;

  @Before
  public void setUp() throws Exception {
    System.setProperty("env", "DEV");
    // Order matters: the override source is listed after the base source.
    ConfigurationSource base = new FileConfigurationSource(TESTFILE.toURI());
    ConfigurationSource override = new FileConfigurationSource(OVERRIDEFILE.toURI());
    List<ConfigurationSource> sources = Lists.newArrayList(base, override);
    provider = new UriConfigurationProvider("a1", sources, null, null, 0);
  }

  @Test
  public void getProperty() throws Exception {
    String bind = provider.getFlumeConfiguration()
        .getConfigurationFor("a1")
        .getSourceContext().get("r1").getParameters().get("bind");
    Assert.assertEquals(BIND, bind);
  }
}
| 9,573 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestEnvLookup.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.File;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.EnvironmentVariables;
import com.google.common.collect.Lists;
import junit.framework.Assert;
/**
 * Verifies environment-variable lookups in configuration files: with NC_PORT
 * set in the environment, the r1 source's "port" parameter must materialize
 * to that value.
 */
public class TestEnvLookup {
  private static final File TESTFILE = new File(
      TestEnvLookup.class.getClassLoader()
          .getResource("flume-conf-with-envLookup.properties").getFile());
  // Value injected into the environment and expected back from the config.
  private static final String NC_PORT = "6667";
  @Rule
  public final EnvironmentVariables environmentVariables = new EnvironmentVariables();
  private UriConfigurationProvider provider;

  @Before
  public void setUp() throws Exception {
    environmentVariables.set("NC_PORT", NC_PORT);
    ConfigurationSource confSource = new FileConfigurationSource(TESTFILE.toURI());
    List<ConfigurationSource> sources = Lists.newArrayList(confSource);
    provider = new UriConfigurationProvider("a1", sources, null, null, 0);
  }

  @Test
  public void getProperty() throws Exception {
    String port = provider.getFlumeConfiguration()
        .getConfigurationFor("a1")
        .getSourceContext().get("r1").getParameters().get("port");
    Assert.assertEquals(NC_PORT, port);
  }
}
| 9,574 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestAbstractConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import com.google.common.collect.Maps;
import junit.framework.Assert;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.annotations.Disposable;
import org.apache.flume.annotations.Recyclable;
import org.apache.flume.channel.AbstractChannel;
import org.apache.flume.conf.Configurable;
import org.apache.flume.conf.FlumeConfiguration;
import org.apache.flume.sink.AbstractSink;
import org.apache.flume.source.AbstractSource;
import org.junit.Test;
import java.util.Map;
public class TestAbstractConfigurationProvider {
@Test
public void testDispoableChannel() throws Exception {
String agentName = "agent1";
Map<String, String> properties = getPropertiesForChannel(agentName,
DisposableChannel.class.getName());
MemoryConfigurationProvider provider =
new MemoryConfigurationProvider(agentName, properties);
MaterializedConfiguration config1 = provider.getConfiguration();
Channel channel1 = config1.getChannels().values().iterator().next();
Assert.assertTrue(channel1 instanceof DisposableChannel);
MaterializedConfiguration config2 = provider.getConfiguration();
Channel channel2 = config2.getChannels().values().iterator().next();
Assert.assertTrue(channel2 instanceof DisposableChannel);
Assert.assertNotSame(channel1, channel2);
}
@Test
public void testReusableChannel() throws Exception {
String agentName = "agent1";
Map<String, String> properties = getPropertiesForChannel(agentName,
RecyclableChannel.class.getName());
MemoryConfigurationProvider provider =
new MemoryConfigurationProvider(agentName, properties);
MaterializedConfiguration config1 = provider.getConfiguration();
Channel channel1 = config1.getChannels().values().iterator().next();
Assert.assertTrue(channel1 instanceof RecyclableChannel);
MaterializedConfiguration config2 = provider.getConfiguration();
Channel channel2 = config2.getChannels().values().iterator().next();
Assert.assertTrue(channel2 instanceof RecyclableChannel);
Assert.assertSame(channel1, channel2);
}
@Test
public void testUnspecifiedChannel() throws Exception {
String agentName = "agent1";
Map<String, String> properties = getPropertiesForChannel(agentName,
UnspecifiedChannel.class.getName());
MemoryConfigurationProvider provider =
new MemoryConfigurationProvider(agentName, properties);
MaterializedConfiguration config1 = provider.getConfiguration();
Channel channel1 = config1.getChannels().values().iterator().next();
Assert.assertTrue(channel1 instanceof UnspecifiedChannel);
MaterializedConfiguration config2 = provider.getConfiguration();
Channel channel2 = config2.getChannels().values().iterator().next();
Assert.assertTrue(channel2 instanceof UnspecifiedChannel);
Assert.assertSame(channel1, channel2);
}
@Test
public void testReusableChannelNotReusedLater() throws Exception {
String agentName = "agent1";
Map<String, String> propertiesReusable = getPropertiesForChannel(agentName,
RecyclableChannel.class
.getName());
Map<String, String> propertiesDispoable = getPropertiesForChannel(agentName,
DisposableChannel.class
.getName());
MemoryConfigurationProvider provider =
new MemoryConfigurationProvider(agentName, propertiesReusable);
MaterializedConfiguration config1 = provider.getConfiguration();
Channel channel1 = config1.getChannels().values().iterator().next();
Assert.assertTrue(channel1 instanceof RecyclableChannel);
provider.setProperties(propertiesDispoable);
MaterializedConfiguration config2 = provider.getConfiguration();
Channel channel2 = config2.getChannels().values().iterator().next();
Assert.assertTrue(channel2 instanceof DisposableChannel);
provider.setProperties(propertiesReusable);
MaterializedConfiguration config3 = provider.getConfiguration();
Channel channel3 = config3.getChannels().values().iterator().next();
Assert.assertTrue(channel3 instanceof RecyclableChannel);
Assert.assertNotSame(channel1, channel3);
}
@Test
public void testSourceThrowsExceptionDuringConfiguration() throws Exception {
String agentName = "agent1";
String sourceType = UnconfigurableSource.class.getName();
String channelType = "memory";
String sinkType = "null";
Map<String, String> properties = getProperties(agentName, sourceType,
channelType, sinkType);
MemoryConfigurationProvider provider =
new MemoryConfigurationProvider(agentName, properties);
MaterializedConfiguration config = provider.getConfiguration();
Assert.assertTrue(config.getSourceRunners().size() == 0);
Assert.assertTrue(config.getChannels().size() == 1);
Assert.assertTrue(config.getSinkRunners().size() == 1);
}
@Test
public void testChannelThrowsExceptionDuringConfiguration() throws Exception {
String agentName = "agent1";
String sourceType = "seq";
String channelType = UnconfigurableChannel.class.getName();
String sinkType = "null";
Map<String, String> properties = getProperties(agentName, sourceType,
channelType, sinkType);
MemoryConfigurationProvider provider =
new MemoryConfigurationProvider(agentName, properties);
MaterializedConfiguration config = provider.getConfiguration();
Assert.assertTrue(config.getSourceRunners().size() == 0);
Assert.assertTrue(config.getChannels().size() == 0);
Assert.assertTrue(config.getSinkRunners().size() == 0);
}
@Test
public void testSinkThrowsExceptionDuringConfiguration() throws Exception {
String agentName = "agent1";
String sourceType = "seq";
String channelType = "memory";
String sinkType = UnconfigurableSink.class.getName();
Map<String, String> properties = getProperties(agentName, sourceType,
channelType, sinkType);
MemoryConfigurationProvider provider =
new MemoryConfigurationProvider(agentName, properties);
MaterializedConfiguration config = provider.getConfiguration();
Assert.assertTrue(config.getSourceRunners().size() == 1);
Assert.assertTrue(config.getChannels().size() == 1);
Assert.assertTrue(config.getSinkRunners().size() == 0);
}
@Test
public void testSourceAndSinkThrowExceptionDuringConfiguration()
throws Exception {
String agentName = "agent1";
String sourceType = UnconfigurableSource.class.getName();
String channelType = "memory";
String sinkType = UnconfigurableSink.class.getName();
Map<String, String> properties = getProperties(agentName, sourceType,
channelType, sinkType);
MemoryConfigurationProvider provider =
new MemoryConfigurationProvider(agentName, properties);
MaterializedConfiguration config = provider.getConfiguration();
Assert.assertTrue(config.getSourceRunners().size() == 0);
Assert.assertTrue(config.getChannels().size() == 0);
Assert.assertTrue(config.getSinkRunners().size() == 0);
}
@Test
public void testSinkSourceMismatchDuringConfiguration() throws Exception {
String agentName = "agent1";
String sourceType = "seq";
String channelType = "memory";
String sinkType = "avro";
Map<String, String> properties = getProperties(agentName, sourceType,
channelType, sinkType);
properties.put(agentName + ".channels.channel1.capacity", "1000");
properties.put(agentName + ".channels.channel1.transactionCapacity", "1000");
properties.put(agentName + ".sources.source1.batchSize", "1000");
properties.put(agentName + ".sinks.sink1.batch-size", "1000");
properties.put(agentName + ".sinks.sink1.hostname", "10.10.10.10");
properties.put(agentName + ".sinks.sink1.port", "1010");
MemoryConfigurationProvider provider =
new MemoryConfigurationProvider(agentName, properties);
MaterializedConfiguration config = provider.getConfiguration();
Assert.assertTrue(config.getSourceRunners().size() == 1);
Assert.assertTrue(config.getChannels().size() == 1);
Assert.assertTrue(config.getSinkRunners().size() == 1);
properties.put(agentName + ".sources.source1.batchSize", "1001");
properties.put(agentName + ".sinks.sink1.batch-size", "1000");
provider = new MemoryConfigurationProvider(agentName, properties);
config = provider.getConfiguration();
Assert.assertTrue(config.getSourceRunners().size() == 0);
Assert.assertTrue(config.getChannels().size() == 1);
Assert.assertTrue(config.getSinkRunners().size() == 1);
properties.put(agentName + ".sources.source1.batchSize", "1000");
properties.put(agentName + ".sinks.sink1.batch-size", "1001");
provider = new MemoryConfigurationProvider(agentName, properties);
config = provider.getConfiguration();
Assert.assertTrue(config.getSourceRunners().size() == 1);
Assert.assertTrue(config.getChannels().size() == 1);
Assert.assertTrue(config.getSinkRunners().size() == 0);
properties.put(agentName + ".sources.source1.batchSize", "1001");
properties.put(agentName + ".sinks.sink1.batch-size", "1001");
provider = new MemoryConfigurationProvider(agentName, properties);
config = provider.getConfiguration();
Assert.assertTrue(config.getSourceRunners().size() == 0);
Assert.assertTrue(config.getChannels().size() == 0);
Assert.assertTrue(config.getSinkRunners().size() == 0);
}
private Map<String, String> getProperties(String agentName,
String sourceType, String channelType,
String sinkType) {
Map<String, String> properties = Maps.newHashMap();
properties.put(agentName + ".sources", "source1");
properties.put(agentName + ".channels", "channel1");
properties.put(agentName + ".sinks", "sink1");
properties.put(agentName + ".sources.source1.type", sourceType);
properties.put(agentName + ".sources.source1.channels", "channel1");
properties.put(agentName + ".channels.channel1.type", channelType);
properties.put(agentName + ".channels.channel1.capacity", "100");
properties.put(agentName + ".sinks.sink1.type", sinkType);
properties.put(agentName + ".sinks.sink1.channel", "channel1");
return properties;
}
private Map<String, String> getPropertiesForChannel(String agentName, String channelType) {
return getProperties(agentName, "seq", channelType, "null");
}
public static class MemoryConfigurationProvider extends AbstractConfigurationProvider {
private Map<String, String> properties;
public MemoryConfigurationProvider(String agentName, Map<String, String> properties) {
super(agentName);
this.properties = properties;
}
public void setProperties(Map<String, String> properties) {
this.properties = properties;
}
@Override
protected FlumeConfiguration getFlumeConfiguration() {
return new FlumeConfiguration(properties);
}
}
@Disposable
public static class DisposableChannel extends AbstractChannel {
@Override
public void put(Event event) throws ChannelException {
throw new UnsupportedOperationException();
}
@Override
public Event take() throws ChannelException {
throw new UnsupportedOperationException();
}
@Override
public Transaction getTransaction() {
throw new UnsupportedOperationException();
}
}
@Recyclable
public static class RecyclableChannel extends AbstractChannel {
@Override
public void put(Event event) throws ChannelException {
throw new UnsupportedOperationException();
}
@Override
public Event take() throws ChannelException {
throw new UnsupportedOperationException();
}
@Override
public Transaction getTransaction() {
throw new UnsupportedOperationException();
}
}
public static class UnspecifiedChannel extends AbstractChannel {
@Override
public void put(Event event) throws ChannelException {
throw new UnsupportedOperationException();
}
@Override
public Event take() throws ChannelException {
throw new UnsupportedOperationException();
}
@Override
public Transaction getTransaction() {
throw new UnsupportedOperationException();
}
}
public static class UnconfigurableChannel extends AbstractChannel {
@Override
public void configure(Context context) {
throw new RuntimeException("expected");
}
@Override
public void put(Event event) throws ChannelException {
throw new UnsupportedOperationException();
}
@Override
public Event take() throws ChannelException {
throw new UnsupportedOperationException();
}
@Override
public Transaction getTransaction() {
throw new UnsupportedOperationException();
}
}
public static class UnconfigurableSource extends AbstractSource implements Configurable {
@Override
public void configure(Context context) {
throw new RuntimeException("expected");
}
}
public static class UnconfigurableSink extends AbstractSink implements Configurable {
@Override
public void configure(Context context) {
throw new RuntimeException("expected");
}
@Override
public Status process() throws EventDeliveryException {
throw new UnsupportedOperationException();
}
}
}
| 9,575 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestRecursiveLookup.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.File;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.EnvironmentVariables;
import com.google.common.collect.Lists;
import junit.framework.Assert;
/**
 * Verifies recursive lookup resolution: with the "env" system property set to
 * DEV, the r1 source's "bind" parameter in the test config must resolve to
 * the expected address.
 */
public class TestRecursiveLookup {
  private static final File TESTFILE = new File(
      TestRecursiveLookup.class.getClassLoader()
          .getResource("flume-conf-with-recursiveLookup.properties").getFile());
  // Bind address expected after all lookups resolve.
  private static final String BIND = "192.168.11.101";
  @Rule
  public final EnvironmentVariables environmentVariables = new EnvironmentVariables();
  private UriConfigurationProvider provider;

  @Before
  public void setUp() throws Exception {
    System.setProperty("env", "DEV");
    ConfigurationSource confSource = new FileConfigurationSource(TESTFILE.toURI());
    List<ConfigurationSource> sources = Lists.newArrayList(confSource);
    provider = new UriConfigurationProvider("a1", sources, null, null, 0);
  }

  @Test
  public void getProperty() throws Exception {
    String bind = provider.getFlumeConfiguration()
        .getConfigurationFor("a1")
        .getSourceContext().get("r1").getParameters().get("bind");
    Assert.assertEquals(BIND, bind);
  }
}
| 9,576 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestMapResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.flume.node;
import java.io.FileInputStream;
import java.util.Map;
import java.util.Properties;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* Tests the MapResolver.
*/
/**
 * Tests the MapResolver, both with the default lookups and with a custom
 * lookup set registered via the {@code lookups} system property.
 */
public class TestMapResolver {

  public static final String TEST_CONST = "Apache Flume";
  private static final String TEST_PROPS = "target/test-classes/map-resolver.properties";
  private static final String NAME_VALUE = "FLUME";

  @After
  public void after() {
    // Clear every system property the tests set so no state leaks into
    // other tests in the same JVM ("name" was previously never cleared).
    System.clearProperty("lookups");
    System.clearProperty("name");
  }

  /** Loads the test properties file, closing the stream (previously leaked). */
  private static Properties loadTestProperties() throws Exception {
    Properties props = new Properties();
    try (FileInputStream in = new FileInputStream(TEST_PROPS)) {
      props.load(in);
    }
    return props;
  }

  @Test
  public void testDefaultResolver() throws Exception {
    Properties props = loadTestProperties();
    System.setProperty("name", NAME_VALUE);
    Map<String, String> properties = MapResolver.resolveProperties(props);
    String name = properties.get("name");
    assertNotNull("No name property", name);
    assertEquals("Incorrect system property resolution", NAME_VALUE, name);
    String testStr = properties.get("const");
    assertNotNull("No const property", testStr);
    // The "const" lookup is not registered, so its placeholder must survive.
    assertTrue("Constant was resolved", testStr.contains("${const:"));
    String version = properties.get("version");
    assertNotNull("No Java property", version);
    assertFalse("Java lookup was not resolved", version.contains("${java:"));
  }

  @Test
  public void testCustomResolver() throws Exception {
    Properties props = loadTestProperties();
    System.setProperty("name", NAME_VALUE);
    // Registers the additional lookups declared in test-lookups.properties.
    System.setProperty("lookups", "test-lookups.properties");
    Map<String, String> properties = MapResolver.resolveProperties(props);
    String name = properties.get("name");
    assertNotNull("No name property", name);
    assertEquals("Incorrect system property resolution", NAME_VALUE, name);
    String testStr = properties.get("const");
    assertNotNull("No const property", testStr);
    assertTrue("Constant was resolved", testStr.contains("${const:"));
    String version = properties.get("version");
    assertNotNull("No Java property", version);
    assertFalse("Java lookup was not resolved", version.contains("${java:"));
    String test = properties.get("test");
    // Fixed copy-paste bug: this previously asserted "version" again
    // instead of the "test" value it is meant to check.
    assertNotNull("No Test property", test);
    assertEquals("Test lookup was not resolved", "Value", test);
  }
}
| 9,577 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestStaticZooKeeperConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import org.junit.Test;
public class TestStaticZooKeeperConfigurationProvider extends
TestAbstractZooKeeperConfigurationProvider {
private StaticZooKeeperConfigurationProvider configurationProvider;
@Override
protected void doSetUp() throws Exception {
addData();
configurationProvider = new StaticZooKeeperConfigurationProvider(
AGENT_NAME, "localhost:" + zkServer.getPort(), null);
}
@Override
protected void doTearDown() throws Exception {
// do nothing
}
@Test
public void testPropertyRead() throws Exception {
verifyProperties(configurationProvider);
}
} | 9,578 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestAbstractZooKeeperConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import junit.framework.Assert;
import org.apache.commons.io.IOUtils;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.curator.test.TestingServer;
import org.apache.curator.utils.EnsurePath;
import org.apache.flume.conf.FlumeConfiguration;
import org.apache.flume.conf.FlumeConfigurationError;
import org.junit.After;
import org.junit.Before;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Base class for ZooKeeper configuration-provider tests. Boots an
 * in-process ZooKeeper server plus a Curator client per test, and provides
 * helpers to upload the test configuration and verify the parsed result.
 */
public abstract class TestAbstractZooKeeperConfigurationProvider {

  private static final String FLUME_CONF_FILE = "flume-conf.properties";

  protected static final String AGENT_NAME = "a1";

  protected static final String AGENT_PATH =
      AbstractZooKeeperConfigurationProvider.DEFAULT_ZK_BASE_PATH + "/" + AGENT_NAME;

  protected TestingServer zkServer;
  protected CuratorFramework client;

  /**
   * Starts ZooKeeper, connects a Curator client, ensures the agent path
   * exists, then runs the subclass hook.
   */
  @Before
  public void setUp() throws Exception {
    zkServer = new TestingServer();
    client = CuratorFrameworkFactory
        .newClient("localhost:" + zkServer.getPort(),
            new ExponentialBackoffRetry(1000, 3));
    client.start();

    EnsurePath ensurePath = new EnsurePath(AGENT_PATH);
    ensurePath.ensure(client.getZookeeperClient());
    doSetUp();
  }

  /** Subclass hook, run after ZooKeeper and the client are available. */
  protected abstract void doSetUp() throws Exception;

  /**
   * Runs the subclass hook, then closes the client BEFORE the server.
   * (Previously the server was closed first, leaving the client to shut
   * down against a dead server.)
   */
  @After
  public void tearDown() throws Exception {
    doTearDown();
    client.close();
    zkServer.close();
  }

  /** Subclass hook, run before ZooKeeper is shut down. */
  protected abstract void doTearDown() throws Exception;

  /** Uploads the test configuration file to the agent's ZooKeeper node. */
  protected void addData() throws Exception {
    try (Reader in = new InputStreamReader(getClass().getClassLoader()
        .getResourceAsStream(FLUME_CONF_FILE), Charsets.UTF_8)) {
      String config = IOUtils.toString(in);
      // Encode explicitly: the no-arg getBytes() used the platform charset,
      // which can corrupt non-ASCII configuration on some platforms.
      client.setData().forPath(AGENT_PATH, config.getBytes(Charsets.UTF_8));
    }
  }

  /**
   * Asserts that the provider parsed the test configuration as expected:
   * the known configuration errors are reported, and host1's components
   * are present.
   */
  protected void verifyProperties(AbstractConfigurationProvider cp) {
    FlumeConfiguration configuration = cp.getFlumeConfiguration();
    Assert.assertNotNull(configuration);

    /*
     * Test the known errors in the file
     */
    List<String> expected = Lists.newArrayList();
    expected.add("host5 CONFIG_ERROR");
    expected.add("host5 INVALID_PROPERTY");
    expected.add("host4 CONFIG_ERROR");
    expected.add("host4 CONFIG_ERROR");
    expected.add("host4 PROPERTY_VALUE_NULL");
    expected.add("host4 PROPERTY_VALUE_NULL");
    expected.add("host4 PROPERTY_VALUE_NULL");
    expected.add("host4 AGENT_CONFIGURATION_INVALID");
    expected.add("ch2 ATTRS_MISSING");
    expected.add("host3 CONFIG_ERROR");
    expected.add("host3 PROPERTY_VALUE_NULL");
    expected.add("host3 AGENT_CONFIGURATION_INVALID");
    expected.add("host2 PROPERTY_VALUE_NULL");
    expected.add("host2 AGENT_CONFIGURATION_INVALID");
    List<String> actual = Lists.newArrayList();
    for (FlumeConfigurationError error : configuration.getConfigurationErrors()) {
      actual.add(error.getComponentName() + " " + error.getErrorType().toString());
    }
    // Sort both sides: error reporting order is not part of the contract.
    Collections.sort(expected);
    Collections.sort(actual);
    Assert.assertEquals(expected, actual);

    FlumeConfiguration.AgentConfiguration agentConfiguration = configuration
        .getConfigurationFor("host1");
    Assert.assertNotNull(agentConfiguration);

    Set<String> sources = Sets.newHashSet("source1");
    Set<String> sinks = Sets.newHashSet("sink1");
    Set<String> channels = Sets.newHashSet("channel1");

    Assert.assertEquals(sources, agentConfiguration.getSourceSet());
    Assert.assertEquals(sinks, agentConfiguration.getSinkSet());
    Assert.assertEquals(channels, agentConfiguration.getChannelSet());
  }
}
| 9,579 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestPropertiesFileConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import junit.framework.Assert;
import org.apache.flume.conf.FlumeConfiguration;
import org.apache.flume.conf.FlumeConfiguration.AgentConfiguration;
import org.apache.flume.conf.FlumeConfigurationError;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Verifies that {@link UriConfigurationProvider} parses a properties-file
 * configuration, reporting the expected errors and component sets.
 */
public class TestPropertiesFileConfigurationProvider {

  private static final Logger LOGGER =
      LoggerFactory.getLogger(TestPropertiesFileConfigurationProvider.class);

  private static final File TESTFILE = new File(
      TestPropertiesFileConfigurationProvider.class.getClassLoader()
          .getResource("flume-conf.properties").getFile());

  private UriConfigurationProvider provider;
  private List<ConfigurationSource> sources;

  @Before
  public void setUp() throws Exception {
    ConfigurationSource source = new FileConfigurationSource(TESTFILE.toURI());
    sources = Lists.newArrayList(source);
    provider = new UriConfigurationProvider("test", sources, null, null, 0);
    provider.start();
  }

  @After
  public void tearDown() throws Exception {
    // Symmetric with setUp(): the provider was started but previously never
    // stopped, leaking its resources between tests.
    provider.stop();
  }

  @Test
  public void testPropertyRead() throws Exception {
    FlumeConfiguration configuration = provider.getFlumeConfiguration();
    Assert.assertNotNull(configuration);

    /*
     * Test the known errors in the file
     */
    List<String> expected = Lists.newArrayList();
    expected.add("host5 CONFIG_ERROR");
    expected.add("host5 INVALID_PROPERTY");
    expected.add("host4 CONFIG_ERROR");
    expected.add("host4 CONFIG_ERROR");
    expected.add("host4 PROPERTY_VALUE_NULL");
    expected.add("host4 PROPERTY_VALUE_NULL");
    expected.add("host4 PROPERTY_VALUE_NULL");
    expected.add("host4 AGENT_CONFIGURATION_INVALID");
    expected.add("ch2 ATTRS_MISSING");
    expected.add("host3 CONFIG_ERROR");
    expected.add("host3 PROPERTY_VALUE_NULL");
    expected.add("host3 AGENT_CONFIGURATION_INVALID");
    expected.add("host2 PROPERTY_VALUE_NULL");
    expected.add("host2 AGENT_CONFIGURATION_INVALID");
    List<String> actual = Lists.newArrayList();
    for (FlumeConfigurationError error : configuration.getConfigurationErrors()) {
      actual.add(error.getComponentName() + " " + error.getErrorType().toString());
    }
    // Order of reported errors is not part of the contract; compare sorted.
    Collections.sort(expected);
    Collections.sort(actual);
    Assert.assertEquals(expected, actual);

    AgentConfiguration agentConfiguration =
        configuration.getConfigurationFor("host1");
    Assert.assertNotNull(agentConfiguration);

    LOGGER.info(agentConfiguration.getPrevalidationConfig());
    LOGGER.info(agentConfiguration.getPostvalidationConfig());

    // Renamed from "sources" etc. to avoid shadowing the field above.
    Set<String> expectedSources = Sets.newHashSet("source1");
    Set<String> expectedSinks = Sets.newHashSet("sink1");
    Set<String> expectedChannels = Sets.newHashSet("channel1");

    Assert.assertEquals(expectedSources, agentConfiguration.getSourceSet());
    Assert.assertEquals(expectedSinks, agentConfiguration.getSinkSet());
    Assert.assertEquals(expectedChannels, agentConfiguration.getChannelSet());
  }
}
| 9,580 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/TestHttpConfigurationSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.flume.node;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.file.Files;
import java.util.Base64;
import java.util.Enumeration;
import java.util.Properties;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.flume.conf.ConfigurationException;
import org.apache.flume.node.net.AuthorizationProvider;
import org.apache.flume.node.net.BasicAuthorizationProvider;
import org.eclipse.jetty.http.HttpHeader;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Tests that files can be loaded via http.
*/
/**
 * Tests that configuration files can be loaded via HTTP, including Basic
 * authentication and If-Modified-Since handling.
 */
public class TestHttpConfigurationSource {
  private static final Logger LOGGER = LoggerFactory.getLogger(TestHttpConfigurationSource.class);
  private static final String BASIC = "Basic ";
  private static final String expectedCreds = "flume:flume";
  private static Server server;
  private static Base64.Decoder decoder = Base64.getDecoder();
  private static int port;

  /** Starts an embedded Jetty server on an OS-assigned port. */
  @BeforeClass
  public static void startServer() throws Exception {
    try {
      server = new Server(0); // port 0: let the OS pick a free port
      ServletContextHandler context = new ServletContextHandler();
      ServletHolder defaultServ = new ServletHolder("default", TestServlet.class);
      defaultServ.setInitParameter("resourceBase", System.getProperty("user.dir"));
      defaultServ.setInitParameter("dirAllowed", "true");
      context.addServlet(defaultServ, "/");
      server.setHandler(context);

      // Start Server
      server.start();
      port = ((ServerConnector) server.getConnectors()[0]).getLocalPort();
    } catch (Throwable ex) {
      ex.printStackTrace();
      throw ex;
    }
  }

  @AfterClass
  public static void stopServer() throws Exception {
    server.stop();
  }

  /** Wrong credentials must make the source constructor fail. */
  @Test(expected = ConfigurationException.class)
  public void testBadCredentials() throws Exception {
    URI confFile = new URI("http://localhost:" + port + "/flume-conf.properties");
    AuthorizationProvider authProvider = new BasicAuthorizationProvider("foo", "bar");
    // Construction performs the initial fetch, which receives a 401 and throws.
    ConfigurationSource source = new HttpConfigurationSource(confFile, authProvider, true);
  }

  /** Happy path: fetch, parse, and detect modification via Last-Modified. */
  @Test
  public void testGet() throws Exception {
    URI confFile = new URI("http://localhost:" + port + "/flume-conf.properties");
    AuthorizationProvider authProvider = new BasicAuthorizationProvider("flume", "flume");
    ConfigurationSource source = new HttpConfigurationSource(confFile, authProvider, true);
    Assert.assertNotNull("No configuration returned", source);
    InputStream is = source.getInputStream();
    Assert.assertNotNull("No data returned", is);
    Properties props = new Properties();
    props.load(is);
    String value = props.getProperty("host1.sources");
    Assert.assertNotNull("Missing key", value);
    Assert.assertFalse(source.isModified());
    File file = new File("target/test-classes/flume-conf.properties");
    // Touch the backing file; the source should then report a modification.
    if (file.setLastModified(System.currentTimeMillis())) {
      Assert.assertTrue(source.isModified());
    }
  }

  /**
   * Servlet that enforces Basic auth and serves flume-conf.properties with
   * Last-Modified / If-Modified-Since support.
   */
  public static class TestServlet extends DefaultServlet {

    private static final long serialVersionUID = -2885158530511450659L;

    @Override
    protected void doGet(HttpServletRequest request,
        HttpServletResponse response) throws ServletException, IOException {
      Enumeration<String> headers = request.getHeaders(HttpHeader.AUTHORIZATION.toString());
      // Per the Servlet spec, getHeaders() returns an EMPTY enumeration (not
      // null) when the header is absent, so the previous null-only check let
      // unauthenticated requests straight through. Reject both cases.
      if (headers == null || !headers.hasMoreElements()) {
        response.sendError(401, "No Auth header");
        return;
      }
      while (headers.hasMoreElements()) {
        String authData = headers.nextElement();
        Assert.assertTrue("Not a Basic auth header", authData.startsWith(BASIC));
        // Decode with an explicit charset; new String(byte[]) used the
        // platform default.
        String credentials = new String(decoder.decode(authData.substring(BASIC.length())),
            java.nio.charset.StandardCharsets.UTF_8);
        if (!expectedCreds.equals(credentials)) {
          response.sendError(401, "Invalid credentials");
          return;
        }
      }
      if (request.getServletPath().equals("/flume-conf.properties")) {
        File file = new File("target/test-classes/flume-conf.properties");
        long modifiedSince = request.getDateHeader(HttpHeader.IF_MODIFIED_SINCE.toString());
        // HTTP dates have one-second resolution; truncate before comparing.
        long lastModified = (file.lastModified() / 1000) * 1000;
        LOGGER.debug("LastModified: {}, modifiedSince: {}", lastModified, modifiedSince);
        if (modifiedSince > 0 && lastModified <= modifiedSince) {
          response.setStatus(304);
          return;
        }
        response.setDateHeader(HttpHeader.LAST_MODIFIED.toString(), lastModified);
        response.setContentLengthLong(file.length());
        Files.copy(file.toPath(), response.getOutputStream());
        response.getOutputStream().flush();
        response.setStatus(200);
      } else {
        response.sendError(400, "Unsupported request");
      }
    }
  }
}
| 9,581 |
0 | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node | Create_ds/flume/flume-ng-node/src/test/java/org/apache/flume/node/lookup/TestLookup.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.flume.node.lookup;
import org.apache.commons.text.lookup.StringLookup;
/**
* Test Lookup.
*/
/**
 * Identity lookup used by the custom-resolver tests: every variable
 * resolves to its own name.
 */
public class TestLookup implements StringLookup {

  /**
   * Returns the variable name unchanged.
   *
   * @param variable the variable being looked up
   * @return the variable name itself
   */
  @Override
  public String lookup(final String variable) {
    return variable;
  }
}
| 9,582 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/FileConfigurationSourceFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.flume.node;
import java.net.URI;
import java.util.List;
import org.apache.flume.node.net.AuthorizationProvider;
import com.google.common.collect.Lists;
/**
* Creates a FileConfigurationSource.
*/
public class FileConfigurationSourceFactory implements ConfigurationSourceFactory {
@SuppressWarnings(value = {"EI_EXPOSE_REP"})
private static final List<String> SCHEMES = Lists.newArrayList("file");
public List<String> getSchemes() {
return SCHEMES;
}
public ConfigurationSource createConfigurationSource(URI uri,
AuthorizationProvider authorizationProvider, boolean verifyHost) {
return new FileConfigurationSource(uri);
}
}
| 9,583 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/AbstractZooKeeperConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.IOException;
import java.io.StringReader;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import com.google.common.base.Charsets;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.flume.conf.FlumeConfiguration;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
/**
* ZooKeeper based configuration implementation provider.
*
* The Agent configuration can be uploaded in ZooKeeper under a base name, which
* defaults to /flume
*
* Currently the agent configuration is stored under the agent name node in
* ZooKeeper
*
* <PRE>
* /flume
* /a1 [agent config file]
* /a2 [agent config file]
* /a3 [agent config file]
* </PRE>
*
* Configuration format is same as PropertiesFileConfigurationProvider
*
* Configuration properties
*
* agentName - Name of Agent for which configuration needs to be pulled
*
* zkConnString - Connection string to ZooKeeper Ensemble
* (host:port,host1:port1)
*
* basePath - Base Path where agent configuration needs to be stored. Defaults
* to /flume
*/
public abstract class AbstractZooKeeperConfigurationProvider extends
    AbstractConfigurationProvider {

  /** Default ZooKeeper node under which agent configurations live. */
  static final String DEFAULT_ZK_BASE_PATH = "/flume";

  protected final String basePath;

  protected final String zkConnString;

  /**
   * @param agentName name of the agent whose configuration is read
   * @param zkConnString ZooKeeper connection string (host:port,host1:port1);
   *     must be non-empty
   * @param basePath base node for agent configs; null/empty falls back to
   *     {@link #DEFAULT_ZK_BASE_PATH}
   */
  protected AbstractZooKeeperConfigurationProvider(String agentName,
      String zkConnString, String basePath) {
    super(agentName);
    Preconditions.checkArgument(!Strings.isNullOrEmpty(zkConnString),
        "Invalid Zookeeper Connection String %s", zkConnString);
    this.zkConnString = zkConnString;
    this.basePath = (basePath == null || basePath.isEmpty())
        ? DEFAULT_ZK_BASE_PATH : basePath;
  }

  /** Builds a Curator client for the configured ensemble (not started). */
  protected CuratorFramework createClient() {
    ExponentialBackoffRetry retryPolicy = new ExponentialBackoffRetry(1000, 1);
    return CuratorFrameworkFactory.newClient(zkConnString, retryPolicy);
  }

  /**
   * Parses raw node data (UTF-8 properties format) into a
   * {@link FlumeConfiguration}; null or empty data yields an empty config.
   */
  protected FlumeConfiguration configFromBytes(byte[] configData)
      throws IOException {
    final Map<String, String> configMap;
    if (configData != null && configData.length > 0) {
      String fileContent = new String(configData, Charsets.UTF_8);
      Properties properties = new Properties();
      properties.load(new StringReader(fileContent));
      configMap = toMap(properties);
    } else {
      configMap = Collections.emptyMap();
    }
    return new FlumeConfiguration(configMap);
  }
}
| 9,584 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/Application.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.node;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.StringUtils;
import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Sink;
import org.apache.flume.SinkProcessor;
import org.apache.flume.SinkRunner;
import org.apache.flume.Source;
import org.apache.flume.SourceRunner;
import org.apache.flume.instrumentation.MonitorService;
import org.apache.flume.instrumentation.MonitoringType;
import org.apache.flume.lifecycle.LifecycleAware;
import org.apache.flume.lifecycle.LifecycleState;
import org.apache.flume.lifecycle.LifecycleSupervisor;
import org.apache.flume.lifecycle.LifecycleSupervisor.SupervisorPolicy;
import org.apache.flume.node.net.AuthorizationProvider;
import org.apache.flume.node.net.BasicAuthorizationProvider;
import org.apache.flume.sink.AbstractSingleSinkProcessor;
import org.apache.flume.sink.AbstractSinkProcessor;
import org.apache.flume.util.SSLUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
public class Application {
private static final Logger logger = LoggerFactory
.getLogger(Application.class);
public static final String CONF_MONITOR_CLASS = "flume.monitoring.type";
public static final String CONF_MONITOR_PREFIX = "flume.monitoring.";
private static final int DEFAULT_INTERVAL = 300;
private static final int DEFAULT_FILE_INTERVAL = 30;
private final List<LifecycleAware> components;
private final LifecycleSupervisor supervisor;
private MaterializedConfiguration materializedConfiguration;
private MonitorService monitorServer;
private final ReentrantLock lifecycleLock = new ReentrantLock();
/**
 * Creates an application with no pre-registered components; configuration
 * events delivered later supply the sources, channels and sinks.
 */
public Application() {
this(new ArrayList<LifecycleAware>(0));
}

/**
 * Creates an application supervising the given components once
 * {@code start()} is called.
 * NOTE(review): the list is stored by reference, not copied — callers
 * should not mutate it after construction; confirm no caller does.
 *
 * @param components components to place under supervision on start
 */
public Application(List<LifecycleAware> components) {
this.components = components;
supervisor = new LifecycleSupervisor();
}
/**
 * Places every pre-registered component under supervision with an
 * always-restart policy, driving each toward the START state. Holds the
 * lifecycle lock so it cannot interleave with stop() or a config reload.
 */
public void start() {
  lifecycleLock.lock();
  try {
    for (LifecycleAware lifecycleAware : components) {
      supervisor.supervise(lifecycleAware,
          new SupervisorPolicy.AlwaysRestartPolicy(), LifecycleState.START);
    }
  } finally {
    lifecycleLock.unlock();
  }
}
/**
 * EventBus callback invoked when a new {@link MaterializedConfiguration}
 * is published: stops the currently running components, initializes the
 * new ones, then starts them — all under the lifecycle lock so reloads
 * cannot interleave with start()/stop().
 *
 * @param conf the freshly materialized configuration to apply
 */
@Subscribe
public void handleConfigurationEvent(MaterializedConfiguration conf) {
try {
// Interruptible so a pending shutdown is not blocked behind a reload.
lifecycleLock.lockInterruptibly();
stopAllComponents();
initializeAllComponents(conf);
startAllComponents(conf);
} catch (InterruptedException e) {
logger.info("Interrupted while trying to handle configuration event");
return;
} finally {
// If interrupted while trying to lock, we don't own the lock, so must not attempt to unlock
if (lifecycleLock.isHeldByCurrentThread()) {
lifecycleLock.unlock();
}
}
}
/**
 * Shuts the application down under the lifecycle lock: stops all running
 * components, then the supervisor, then the monitoring service (if one was
 * started).
 */
public void stop() {
  lifecycleLock.lock();
  try {
    stopAllComponents();
    supervisor.stop();
    MonitorService monitor = monitorServer;
    if (monitor != null) {
      monitor.stop();
    }
  } finally {
    lifecycleLock.unlock();
  }
}
/**
 * Stops every component of the current materialized configuration —
 * sources first, then sinks, then channels, so in-flight data can drain
 * toward the channels — and finally stops the monitoring service if one
 * is running. The three identical loops of the original are folded into
 * {@link #stopComponents(String, java.util.Map)}.
 */
private void stopAllComponents() {
  if (this.materializedConfiguration != null) {
    logger.info("Shutting down configuration: {}", this.materializedConfiguration);
    stopComponents("Source", this.materializedConfiguration.getSourceRunners());
    stopComponents("Sink", this.materializedConfiguration.getSinkRunners());
    stopComponents("Channel", this.materializedConfiguration.getChannels());
  }
  if (monitorServer != null) {
    monitorServer.stop();
  }
}

/**
 * Removes each named component from supervision, logging and continuing on
 * failure so a single bad component cannot block shutdown of the rest.
 *
 * @param type human-readable component type used in log messages
 * @param componentMap map of component name to component
 */
private void stopComponents(String type,
    java.util.Map<String, ? extends LifecycleAware> componentMap) {
  for (Entry<String, ? extends LifecycleAware> entry : componentMap.entrySet()) {
    try {
      logger.info("Stopping " + type + " " + entry.getKey());
      supervisor.unsupervise(entry.getValue());
    } catch (Exception e) {
      logger.error("Error while stopping {}", entry.getValue(), e);
    }
  }
}
/**
 * Runs the {@link Initializable} hook on every channel, sink and source of
 * the new configuration before any component is started.
 *
 * @param materializedConfiguration the configuration whose components are
 *     to be initialized
 */
private void initializeAllComponents(MaterializedConfiguration materializedConfiguration) {
logger.info("Initializing components");
for (Channel ch : materializedConfiguration.getChannels().values()) {
// NOTE(review): nothing visible here advances the channel to START, so if
// initialize() does not change the lifecycle state, this loops forever for
// an Initializable channel — confirm initialize() transitions the state
// (or whether this was intended to be an 'if').
while (ch.getLifecycleState() != LifecycleState.START && ch instanceof Initializable) {
((Initializable) ch).initialize(materializedConfiguration);
}
}
// Sink processors expose their sinks differently depending on whether they
// wrap a single sink or a group of sinks.
for (SinkRunner sinkRunner : materializedConfiguration.getSinkRunners().values()) {
SinkProcessor processor = sinkRunner.getPolicy();
if (processor instanceof AbstractSingleSinkProcessor) {
Sink sink = ((AbstractSingleSinkProcessor) processor).getSink();
if (sink instanceof Initializable) {
((Initializable) sink).initialize(materializedConfiguration);
}
} else if (processor instanceof AbstractSinkProcessor) {
for (Sink sink : ((AbstractSinkProcessor) processor).getSinks()) {
if (sink instanceof Initializable) {
((Initializable) sink).initialize(materializedConfiguration);
}
}
}
}
for (SourceRunner sourceRunner : materializedConfiguration.getSourceRunners().values()) {
Source source = sourceRunner.getSource();
if (source instanceof Initializable) {
((Initializable) source).initialize(materializedConfiguration);
}
}
}
private void startAllComponents(MaterializedConfiguration materializedConfiguration) {
logger.info("Starting new configuration:{}", materializedConfiguration);
this.materializedConfiguration = materializedConfiguration;
for (Entry<String, Channel> entry :
materializedConfiguration.getChannels().entrySet()) {
try {
logger.info("Starting Channel " + entry.getKey());
supervisor.supervise(entry.getValue(),
new SupervisorPolicy.AlwaysRestartPolicy(), LifecycleState.START);
} catch (Exception e) {
logger.error("Error while starting {}", entry.getValue(), e);
}
}
/*
* Wait for all channels to start.
*/
for (Channel ch : materializedConfiguration.getChannels().values()) {
while (ch.getLifecycleState() != LifecycleState.START
&& !supervisor.isComponentInErrorState(ch)) {
try {
logger.info("Waiting for channel: " + ch.getName() +
" to start. Sleeping for 500 ms");
Thread.sleep(500);
} catch (InterruptedException e) {
logger.error("Interrupted while waiting for channel to start.", e);
Throwables.propagate(e);
}
}
}
for (Entry<String, SinkRunner> entry : materializedConfiguration.getSinkRunners().entrySet()) {
try {
logger.info("Starting Sink " + entry.getKey());
supervisor.supervise(entry.getValue(),
new SupervisorPolicy.AlwaysRestartPolicy(), LifecycleState.START);
} catch (Exception e) {
logger.error("Error while starting {}", entry.getValue(), e);
}
}
for (Entry<String, SourceRunner> entry :
materializedConfiguration.getSourceRunners().entrySet()) {
try {
logger.info("Starting Source " + entry.getKey());
supervisor.supervise(entry.getValue(),
new SupervisorPolicy.AlwaysRestartPolicy(), LifecycleState.START);
} catch (Exception e) {
logger.error("Error while starting {}", entry.getValue(), e);
}
}
this.loadMonitoring();
}
@SuppressWarnings("unchecked")
private void loadMonitoring() {
Properties systemProps = System.getProperties();
Set<String> keys = systemProps.stringPropertyNames();
try {
if (keys.contains(CONF_MONITOR_CLASS)) {
String monitorType = systemProps.getProperty(CONF_MONITOR_CLASS);
Class<? extends MonitorService> klass;
try {
//Is it a known type?
klass = MonitoringType.valueOf(
monitorType.toUpperCase(Locale.ENGLISH)).getMonitorClass();
} catch (Exception e) {
//Not a known type, use FQCN
klass = (Class<? extends MonitorService>) Class.forName(monitorType);
}
this.monitorServer = klass.getConstructor().newInstance();
Context context = new Context();
for (String key : keys) {
if (key.startsWith(CONF_MONITOR_PREFIX)) {
context.put(key.substring(CONF_MONITOR_PREFIX.length()),
systemProps.getProperty(key));
}
}
monitorServer.configure(context);
monitorServer.start();
}
} catch (ReflectiveOperationException e) {
logger.warn("Error starting monitoring. "
+ "Monitoring might not be available.", e);
}
}
public static void main(String[] args) {
Properties initProps = loadConfigOpts();
try {
SSLUtil.initGlobalSSLParameters();
Options options = new Options();
Option option = new Option("n", "name", true, "the name of this agent");
option.setRequired(true);
options.addOption(option);
option = new Option("f", "conf-file", true,
"specify a config file (required if -c, -u, and -z are missing)");
option.setRequired(false);
options.addOption(option);
option = new Option("u", "conf-uri", true,
"specify a config uri (required if -c, -f and -z are missing)");
option.setRequired(false);
options.addOption(option);
option = new Option("a", "auth-provider", true,
"specify an authorization provider class");
option.setRequired(false);
options.addOption(option);
option = new Option("prov", "conf-provider", true,
"specify a configuration provider class (required if -f, -u, and -z are missing)");
option.setRequired(false);
options.addOption(option);
option = new Option("user", "conf-user", true, "user name to access configuration uri");
option.setRequired(false);
options.addOption(option);
option = new Option("pwd", "conf-password", true, "password to access configuration uri");
option.setRequired(false);
options.addOption(option);
option = new Option("i", "poll-interval", true,
"number of seconds between checks for a configuration change");
option.setRequired(false);
options.addOption(option);
option = new Option("b", "backup-directory", true,
"directory in which to store the backup configuration file");
option.setRequired(false);
options.addOption(option);
option = new Option(null, "no-reload-conf", false,
"do not reload config file if changed");
options.addOption(option);
// Options for Zookeeper
option = new Option("z", "zkConnString", true,
"specify the ZooKeeper connection to use (required if -c, -f, and -u are missing)");
option.setRequired(false);
options.addOption(option);
option = new Option("p", "zkBasePath", true,
"specify the base path in ZooKeeper for agent configs");
option.setRequired(false);
options.addOption(option);
option = new Option("h", "help", false, "display help text");
options.addOption(option);
DefaultParser parser = new DefaultParser();
CommandLine commandLine = parser.parse(options, args, initProps);
if (commandLine.hasOption('h')) {
new HelpFormatter().printHelp("flume-ng agent", options, true);
return;
}
String agentName = commandLine.getOptionValue('n');
boolean reload = !commandLine.hasOption("no-reload-conf");
boolean isZkConfigured = false;
if (commandLine.hasOption('z') || commandLine.hasOption("zkConnString")) {
isZkConfigured = true;
}
List<URI> confUri = null;
ConfigurationProvider provider = null;
int defaultInterval = DEFAULT_FILE_INTERVAL;
if (commandLine.hasOption('u') || commandLine.hasOption("conf-uri")) {
confUri = new ArrayList<>();
for (String uri : commandLine.getOptionValues("conf-uri")) {
if (uri.toLowerCase(Locale.ROOT).startsWith("http")) {
defaultInterval = DEFAULT_INTERVAL;
}
confUri.add(new URI(uri));
}
} else if (commandLine.hasOption("f") || commandLine.hasOption("conf-file")) {
confUri = new ArrayList<>();
for (String filePath : commandLine.getOptionValues("conf-file")) {
confUri.add(new File(filePath).toURI());
}
}
if (commandLine.hasOption("prov") || commandLine.hasOption("conf-provider")) {
String className = commandLine.getOptionValue("conf-provider");
try {
Class<?> clazz = Application.class.getClassLoader().loadClass(className);
Constructor<?> constructor = clazz.getConstructor(String[].class);
provider = (ConfigurationProvider) constructor.newInstance((Object[]) args);
} catch (ReflectiveOperationException ex) {
logger.error("Error creating ConfigurationProvider {}", className, ex);
}
}
Application application;
if (provider != null) {
List<LifecycleAware> components = Lists.newArrayList();
application = new Application(components);
application.handleConfigurationEvent(provider.getConfiguration());
} else if (isZkConfigured) {
// get options
String zkConnectionStr = commandLine.getOptionValue('z');
String baseZkPath = commandLine.getOptionValue('p');
if (reload) {
EventBus eventBus = new EventBus(agentName + "-event-bus");
List<LifecycleAware> components = Lists.newArrayList();
PollingZooKeeperConfigurationProvider zookeeperConfigurationProvider =
new PollingZooKeeperConfigurationProvider(
agentName, zkConnectionStr, baseZkPath, eventBus);
components.add(zookeeperConfigurationProvider);
application = new Application(components);
eventBus.register(application);
} else {
StaticZooKeeperConfigurationProvider zookeeperConfigurationProvider =
new StaticZooKeeperConfigurationProvider(
agentName, zkConnectionStr, baseZkPath);
application = new Application();
application.handleConfigurationEvent(zookeeperConfigurationProvider.getConfiguration());
}
} else if (confUri != null) {
String confUser = commandLine.getOptionValue("conf-user");
String confPassword = commandLine.getOptionValue("conf-password");
String pollInterval = commandLine.getOptionValue("poll-interval");
String backupDirectory = commandLine.getOptionValue("backup-directory");
int interval = StringUtils.isNotEmpty(pollInterval) ? Integer.parseInt(pollInterval) : 0;
String verify = commandLine.getOptionValue("verify-host", "true");
boolean verifyHost = Boolean.parseBoolean(verify);
AuthorizationProvider authorizationProvider = null;
String authProviderClass = commandLine.getOptionValue("auth-provider");
if (authProviderClass != null) {
try {
Class<?> clazz = Class.forName(authProviderClass);
Object obj = clazz.getDeclaredConstructor(String[].class)
.newInstance((Object[]) args);
if (obj instanceof AuthorizationProvider) {
authorizationProvider = (AuthorizationProvider) obj;
} else {
logger.error(
"The supplied authorization provider does not implement AuthorizationProvider");
return;
}
} catch (ReflectiveOperationException ex) {
logger.error("Unable to create authorization provider: {}", ex.getMessage());
return;
}
}
if (authorizationProvider == null && StringUtils.isNotEmpty(confUser)
&& StringUtils.isNotEmpty(confPassword)) {
authorizationProvider = new BasicAuthorizationProvider(confUser, confPassword);
}
EventBus eventBus = null;
if (reload) {
eventBus = new EventBus(agentName + "-event-bus");
if (interval == 0) {
interval = defaultInterval;
}
}
List<ConfigurationSource> configurationSources = new ArrayList<>();
for (URI uri : confUri) {
ConfigurationSource configurationSource =
ConfigurationSourceFactory.getConfigurationSource(uri, authorizationProvider,
verifyHost);
if (configurationSource != null) {
configurationSources.add(configurationSource);
}
}
List<LifecycleAware> components = Lists.newArrayList();
UriConfigurationProvider configurationProvider = new UriConfigurationProvider(agentName,
configurationSources, backupDirectory, eventBus, interval);
components.add(configurationProvider);
application = new Application(components);
if (eventBus != null) {
eventBus.register(application);
}
application.handleConfigurationEvent(configurationProvider.getConfiguration());
} else {
throw new ParseException("No configuiration was provided");
}
application.start();
final Application appReference = application;
Runtime.getRuntime().addShutdownHook(new Thread("agent-shutdown-hook") {
@Override
public void run() {
appReference.stop();
}
});
} catch (ParseException | URISyntaxException | RuntimeException e) {
logger.error("A fatal error occurred while running. Exception follows.", e);
}
}
@SuppressWarnings("PMD")
private static Properties loadConfigOpts() {
Properties initProps = new Properties();
InputStream is = null;
try {
is = new FileInputStream("/etc/flume/flume.opts");
} catch (IOException ex) {
// Ignore the exception.
}
if (is == null) {
is = Application.class.getClassLoader().getResourceAsStream("flume.opts");
}
if (is != null) {
try {
initProps.load(is);
} catch (Exception ex) {
logger.warn("Unable to load options file due to: {}", ex.getMessage());
} finally {
try {
is.close();
} catch (IOException ex) {
// Ignore this error.
}
}
}
return initProps;
}
} | 9,585 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/MapResolver.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import org.apache.commons.text.StringSubstitutor;
import org.apache.commons.text.lookup.DefaultStringLookup;
import org.apache.commons.text.lookup.StringLookup;
import org.apache.commons.text.lookup.StringLookupFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Resolves replaceable tokens to create a Map.
 * <p>
 * Needs org.apache.commons:commons-lang3 on classpath
 */
final class MapResolver {

  private static final Logger LOGGER = LoggerFactory.getLogger(MapResolver.class);
  // Classpath resource consulted when no custom lookups file is configured.
  private static final String DEFAULT_LOOKUPS = "lookups.properties";
  // System property naming a custom lookups properties file.
  private static final String CUSTOM_LOOKUPS_KEY = "lookups";
  private static final String PROPS_IMPL_KEY = "propertiesImplementation";
  private static final String ENV_VAR_PROPERTY = "org.apache.flume.node.EnvVarResolverProperties";
  // Prefix marking a lookup value as a DefaultStringLookup enum reference.
  private static final String LOOKUP = "org.apache.commons.text.lookup.DefaultStringLookup.";
  // Lookups registered when no lookups properties file overrides them.
  private static final LookupEntry[] LOOKUP_ENTRIES = {
      new LookupEntry("sys", DefaultStringLookup.SYSTEM_PROPERTIES.getStringLookup()),
      new LookupEntry("env", DefaultStringLookup.ENVIRONMENT.getStringLookup()),
      new LookupEntry("java", DefaultStringLookup.JAVA.getStringLookup()),
      new LookupEntry("date", DefaultStringLookup.DATE.getStringLookup())
  };

  /**
   * Resolves ${...} tokens in every property value and returns the result as a
   * Map. Unprefixed tokens resolve against the map being built (or, for
   * compatibility with EnvVarResolverProperties, against environment variables).
   *
   * @param properties the raw properties to resolve
   * @return a map of property name to fully substituted value
   */
  public static Map<String, String> resolveProperties(Properties properties) {
    Map<String, String> map = new HashMap<>();
    boolean useEnvVars = ENV_VAR_PROPERTY.equals(System.getProperty(PROPS_IMPL_KEY));
    StringLookup defaultLookup = useEnvVars ? new DefaultLookup(map) :
        StringLookupFactory.INSTANCE.mapStringLookup(map);
    StringLookup lookup = StringLookupFactory.INSTANCE.interpolatorStringLookup(createLookupMap(),
        defaultLookup, false);
    StringSubstitutor substitutor = new StringSubstitutor(lookup);
    // Allow nested tokens like ${env:${name}}.
    substitutor.setEnableSubstitutionInVariables(true);
    properties.stringPropertyNames().forEach((k) -> map.put(k,
        substitutor.replace(properties.getProperty(k))));
    return map;
  }

  /**
   * Builds the prefix-to-lookup map, either from a lookups properties file or
   * from the built-in defaults when no file is present.
   */
  private static Map<String, StringLookup> createLookupMap() {
    Map<String, StringLookup> map = new HashMap<>();
    Properties properties = loadProperties();
    if (properties == null) {
      Arrays.stream(LOOKUP_ENTRIES).forEach((e) -> {
        map.put(e.key, e.lookup);
      });
    } else {
      properties.forEach((k, v) -> {
        String key = Objects.toString(k);
        String value = Objects.toString(v);
        if (value.startsWith(LOOKUP)) {
          String lookupEnum = value.substring(LOOKUP.length());
          try {
            StringLookup stringLookup = DefaultStringLookup.valueOf(lookupEnum).getStringLookup();
            map.put(key.toLowerCase(Locale.ROOT), stringLookup);
          } catch (IllegalArgumentException ex) {
            LOGGER.warn("{} is not a DefaultStringLookup enum value, ignoring", key);
          }
        } else {
          try {
            Class<?> clazz = Class.forName(value);
            if (StringLookup.class.isAssignableFrom(clazz)) {
              // FIX: Class.newInstance() is deprecated and propagates undeclared
              // checked exceptions; use the canonical replacement.
              StringLookup stringLookup =
                  (StringLookup) clazz.getDeclaredConstructor().newInstance();
              map.put(key.toLowerCase(Locale.ROOT), stringLookup);
            } else {
              LOGGER.warn("{} is not a StringLookup, ignoring", v);
            }
          } catch (Exception ex) {
            LOGGER.warn("Unable to load {} due to {}, ignoring", v, ex.getMessage());
          }
        }
      });
    }
    return map;
  }

  /**
   * Default string lookup used when EnvVarResolverProperties compatibility is
   * requested: resolves from the properties resolved so far, then from
   * environment variables.
   */
  private static class DefaultLookup implements StringLookup {
    private final Map<String, String> properties;

    DefaultLookup(Map<String, String> properties) {
      this.properties = properties;
    }

    /**
     * Provide compatibility with EnvVarResolverProperties.
     *
     * @param key The key.
     * @return The value associated with the key or null.
     */
    @Override
    public String lookup(String key) {
      return properties.containsKey(key) ?
          properties.get(key) : System.getenv(key);
    }
  }

  /** Immutable (prefix, lookup) pair for the built-in lookup table. */
  private static class LookupEntry {
    private final String key;
    private final StringLookup lookup;

    public LookupEntry(String key, StringLookup lookup) {
      this.key = key;
      this.lookup = lookup;
    }
  }

  /**
   * Loads lookup definitions from the file named by the {@code lookups} system
   * property (as a file path, then as a classpath resource), falling back to
   * the {@code lookups.properties} classpath resource.
   *
   * @return the loaded properties, or null when no definitions could be found
   */
  private static Properties loadProperties() {
    final Properties properties = new Properties();
    String fileName = System.getProperty(CUSTOM_LOOKUPS_KEY);
    if (fileName != null) {
      try (InputStream inputStream = new FileInputStream(fileName)) {
        properties.load(inputStream);
      } catch (final IOException e) {
        // Not readable as a file; retry as a classpath resource.
        try (InputStream inputStream = ClassLoader.getSystemResourceAsStream(fileName)) {
          // FIX: getSystemResourceAsStream may return null; the original called
          // properties.load(null), throwing an uncaught NullPointerException.
          if (inputStream != null) {
            properties.load(inputStream);
          } else {
            LOGGER.warn("Unable to locate lookups resource {}", fileName);
          }
        } catch (final IOException ex) {
          LOGGER.warn("Unable to load {} due to {}", fileName, ex.getMessage());
        }
      }
    }
    if (properties.isEmpty()) {
      try (InputStream inputStream = ClassLoader.getSystemResourceAsStream(DEFAULT_LOOKUPS)) {
        if (inputStream != null) {
          properties.load(inputStream);
        } else {
          return null;
        }
      } catch (final IOException e) {
        return null;
      }
    }
    return properties;
  }
}
| 9,586 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/HttpConfigurationSourceFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.flume.node;
import java.net.URI;
import java.util.List;
import org.apache.flume.node.net.AuthorizationProvider;
import com.google.common.collect.Lists;
/**
 * Factory that builds {@link HttpConfigurationSource} instances for
 * http/https configuration URIs.
 */
public class HttpConfigurationSourceFactory implements ConfigurationSourceFactory {

  // NOTE(review): "EI_EXPOSE_REP" is a SpotBugs pattern; java.lang.@SuppressWarnings
  // likely has no effect on it — confirm the intended suppression mechanism.
  @SuppressWarnings(value = {"EI_EXPOSE_REP"})
  private static final List<String> SCHEMES = Lists.newArrayList("http", "https");

  /** Returns the URI schemes ("http", "https") this factory handles. */
  public List<String> getSchemes() {
    return SCHEMES;
  }

  /** Creates an HttpConfigurationSource for the given URI. */
  public ConfigurationSource createConfigurationSource(URI uri,
      AuthorizationProvider authorizationProvider, boolean verifyHost) {
    return new HttpConfigurationSource(uri, authorizationProvider, verifyHost);
  }
}
| 9,587 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/AbstractConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import org.apache.flume.Channel;
import org.apache.flume.ChannelFactory;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.FlumeException;
import org.apache.flume.Sink;
import org.apache.flume.SinkFactory;
import org.apache.flume.SinkProcessor;
import org.apache.flume.SinkRunner;
import org.apache.flume.Source;
import org.apache.flume.SourceFactory;
import org.apache.flume.SourceRunner;
import org.apache.flume.annotations.Disposable;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.ChannelSelectorFactory;
import org.apache.flume.channel.DefaultChannelFactory;
import org.apache.flume.conf.BasicConfigurationConstants;
import org.apache.flume.conf.BatchSizeSupported;
import org.apache.flume.conf.ComponentConfiguration;
import org.apache.flume.conf.Configurables;
import org.apache.flume.conf.FlumeConfiguration;
import org.apache.flume.conf.FlumeConfiguration.AgentConfiguration;
import org.apache.flume.conf.TransactionCapacitySupported;
import org.apache.flume.conf.channel.ChannelSelectorConfiguration;
import org.apache.flume.conf.sink.SinkConfiguration;
import org.apache.flume.conf.sink.SinkGroupConfiguration;
import org.apache.flume.conf.source.SourceConfiguration;
import org.apache.flume.sink.DefaultSinkFactory;
import org.apache.flume.sink.DefaultSinkProcessor;
import org.apache.flume.sink.SinkGroup;
import org.apache.flume.source.DefaultSourceFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
@SuppressFBWarnings("REC_CATCH_EXCEPTION")
public abstract class AbstractConfigurationProvider implements ConfigurationProvider {
  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractConfigurationProvider.class);

  // Name of the agent whose configuration section this provider materializes.
  private final String agentName;
  // Factories used to instantiate components from their configured type names.
  private final SourceFactory sourceFactory;
  private final SinkFactory sinkFactory;
  private final ChannelFactory channelFactory;
  // Channels kept across re-configurations, keyed by channel class then channel
  // name, so non-disposable channels (and their buffered data) survive a reload.
  private final Map<Class<? extends Channel>, Map<String, Channel>> channelCache;
public AbstractConfigurationProvider(String agentName) {
super();
this.agentName = agentName;
this.sourceFactory = new DefaultSourceFactory();
this.sinkFactory = new DefaultSinkFactory();
this.channelFactory = new DefaultChannelFactory();
channelCache = new HashMap<Class<? extends Channel>, Map<String, Channel>>();
}
  /**
   * Returns the parsed Flume configuration this provider materializes
   * components from. Implementations supply the backing source
   * (properties file, URI, ZooKeeper, ...).
   */
  protected abstract FlumeConfiguration getFlumeConfiguration();
  /**
   * Materializes this agent's configuration: creates and configures channels,
   * sources and sinks, wires them together, and drops any channel that ended up
   * with no connected components. Instantiation failures are logged rather than
   * propagated, so the returned configuration may be partial.
   *
   * @return the materialized configuration; empty when no configuration exists
   *         for this agent name
   */
  public MaterializedConfiguration getConfiguration() {
    MaterializedConfiguration conf = new SimpleMaterializedConfiguration();
    FlumeConfiguration fconfig = getFlumeConfiguration();
    AgentConfiguration agentConf = fconfig.getConfigurationFor(getAgentName());
    if (agentConf != null) {
      Map<String, ChannelComponent> channelComponentMap = Maps.newHashMap();
      Map<String, SourceRunner> sourceRunnerMap = Maps.newHashMap();
      Map<String, SinkRunner> sinkRunnerMap = Maps.newHashMap();
      try {
        loadChannels(agentConf, channelComponentMap);
        loadSources(agentConf, channelComponentMap, sourceRunnerMap);
        loadSinks(agentConf, channelComponentMap, sinkRunnerMap);
        // A channel with no attached source or sink is useless: remove it from
        // both the result and the reuse cache.
        Set<String> channelNames = new HashSet<String>(channelComponentMap.keySet());
        for (String channelName : channelNames) {
          ChannelComponent channelComponent = channelComponentMap.get(channelName);
          if (channelComponent.components.isEmpty()) {
            LOGGER.warn(String.format("Channel %s has no components connected" +
                " and has been removed.", channelName));
            channelComponentMap.remove(channelName);
            Map<String, Channel> nameChannelMap =
                channelCache.get(channelComponent.channel.getClass());
            if (nameChannelMap != null) {
              nameChannelMap.remove(channelName);
            }
          } else {
            LOGGER.info(String.format("Channel %s connected to %s",
                channelName, channelComponent.components.toString()));
            conf.addChannel(channelName, channelComponent.channel);
          }
        }
        for (Map.Entry<String, SourceRunner> entry : sourceRunnerMap.entrySet()) {
          conf.addSourceRunner(entry.getKey(), entry.getValue());
        }
        for (Map.Entry<String, SinkRunner> entry : sinkRunnerMap.entrySet()) {
          conf.addSinkRunner(entry.getKey(), entry.getValue());
        }
      } catch (InstantiationException ex) {
        LOGGER.error("Failed to instantiate component", ex);
      } finally {
        // The local maps are only staging areas; clear them so a failed load
        // does not pin component references.
        channelComponentMap.clear();
        sourceRunnerMap.clear();
        sinkRunnerMap.clear();
      }
    } else {
      LOGGER.warn("No configuration found for this host:{}", getAgentName());
    }
    return conf;
  }
  /** Returns the name of the agent this provider supplies configuration for. */
  public String getAgentName() {
    return agentName;
  }
  /**
   * Creates and configures every channel declared in the agent configuration,
   * populating {@code channelComponentMap}. Existing (non-disposable) channel
   * instances are reused across re-configurations; channels no longer declared
   * are evicted from the cache. Configuration errors remove the offending
   * channel and are logged rather than propagated.
   *
   * @param agentConf the agent configuration to read channel definitions from
   * @param channelComponentMap out-parameter mapping channel name to its
   *        created channel plus (initially empty) connected-component list
   * @throws InstantiationException if a channel cannot be instantiated
   */
  private void loadChannels(AgentConfiguration agentConf,
      Map<String, ChannelComponent> channelComponentMap)
      throws InstantiationException {
    LOGGER.info("Creating channels");

    /*
     * Some channels will be reused across re-configurations. To handle this,
     * we store all the names of current channels, perform the reconfiguration,
     * and then if a channel was not used, we delete our reference to it.
     * This supports the scenario where you enable channel "ch0" then remove it
     * and add it back. Without this, channels like memory channel would cause
     * the first instances data to show up in the seconds.
     */
    ListMultimap<Class<? extends Channel>, String> channelsNotReused =
        ArrayListMultimap.create();
    // assume all channels will not be re-used
    for (Map.Entry<Class<? extends Channel>, Map<String, Channel>> entry :
        channelCache.entrySet()) {
      Class<? extends Channel> channelKlass = entry.getKey();
      Set<String> channelNames = entry.getValue().keySet();
      channelsNotReused.get(channelKlass).addAll(channelNames);
    }

    Set<String> channelNames = agentConf.getChannelSet();
    Map<String, ComponentConfiguration> compMap = agentConf.getChannelConfigMap();
    /*
     * Components which have a ComponentConfiguration object
     */
    for (String chName : channelNames) {
      ComponentConfiguration comp = compMap.get(chName);
      if (comp != null) {
        Channel channel = getOrCreateChannel(channelsNotReused,
            comp.getComponentName(), comp.getType());
        try {
          Configurables.configure(channel, comp);
          channelComponentMap.put(comp.getComponentName(),
              new ChannelComponent(channel));
          LOGGER.info("Created channel " + chName);
        } catch (Exception e) {
          String msg = String.format("Channel %s has been removed due to an " +
              "error during configuration", chName);
          LOGGER.error(msg, e);
        }
      }
    }
    /*
     * Components which DO NOT have a ComponentConfiguration object
     * and use only Context
     */
    for (String chName : channelNames) {
      Context context = agentConf.getChannelContext().get(chName);
      if (context != null) {
        Channel channel = getOrCreateChannel(channelsNotReused, chName,
            context.getString(BasicConfigurationConstants.CONFIG_TYPE));
        try {
          Configurables.configure(channel, context);
          channelComponentMap.put(chName, new ChannelComponent(channel));
          LOGGER.info("Created channel " + chName);
        } catch (Exception e) {
          String msg = String.format("Channel %s has been removed due to an " +
              "error during configuration", chName);
          LOGGER.error(msg, e);
        }
      }
    }
    /*
     * Any channel which was not re-used, will have it's reference removed
     */
    for (Class<? extends Channel> channelKlass : channelsNotReused.keySet()) {
      Map<String, Channel> channelMap = channelCache.get(channelKlass);
      if (channelMap != null) {
        for (String channelName : channelsNotReused.get(channelKlass)) {
          if (channelMap.remove(channelName) != null) {
            LOGGER.info("Removed {} of type {}", channelName, channelKlass);
          }
        }
        if (channelMap.isEmpty()) {
          channelCache.remove(channelKlass);
        }
      }
    }
  }
private Channel getOrCreateChannel(
ListMultimap<Class<? extends Channel>, String> channelsNotReused,
String name, String type)
throws FlumeException {
Class<? extends Channel> channelClass = channelFactory.getClass(type);
/*
* Channel has requested a new instance on each re-configuration
*/
if (channelClass.isAnnotationPresent(Disposable.class)) {
Channel channel = channelFactory.create(name, type);
channel.setName(name);
return channel;
}
Map<String, Channel> channelMap = channelCache.get(channelClass);
if (channelMap == null) {
channelMap = new HashMap<String, Channel>();
channelCache.put(channelClass, channelMap);
}
Channel channel = channelMap.get(name);
if (channel == null) {
channel = channelFactory.create(name, type);
channel.setName(name);
channelMap.put(name, channel);
}
channelsNotReused.get(channelClass).remove(name);
return channel;
}
  /**
   * Creates and configures every source declared in the agent configuration,
   * attaches its channel selector and channel processor, and registers a
   * {@link SourceRunner} for it. Sources are processed in two passes: those with
   * a full {@code ComponentConfiguration} and those configured via raw
   * {@code Context} only. A source with no resolvable channel, or any
   * configuration error, is removed and logged rather than propagated.
   *
   * @param agentConf the agent configuration to read source definitions from
   * @param channelComponentMap channels created by {@code loadChannels}; each
   *        connected channel has this source's name added to its component list
   * @param sourceRunnerMap out-parameter mapping source name to its runner
   * @throws InstantiationException if source/channel batch-size compatibility
   *         checks fail during channel resolution
   */
  private void loadSources(AgentConfiguration agentConf,
      Map<String, ChannelComponent> channelComponentMap,
      Map<String, SourceRunner> sourceRunnerMap)
      throws InstantiationException {

    Set<String> sourceNames = agentConf.getSourceSet();
    Map<String, ComponentConfiguration> compMap =
        agentConf.getSourceConfigMap();
    /*
     * Components which have a ComponentConfiguration object
     */
    for (String sourceName : sourceNames) {
      ComponentConfiguration comp = compMap.get(sourceName);
      if (comp != null) {
        SourceConfiguration config = (SourceConfiguration) comp;

        Source source = sourceFactory.create(comp.getComponentName(),
            comp.getType());
        try {
          Configurables.configure(source, config);
          Set<String> channelNames = config.getChannels();
          List<Channel> sourceChannels =
              getSourceChannels(channelComponentMap, source, channelNames);
          if (sourceChannels.isEmpty()) {
            String msg = String.format("Source %s is not connected to a " +
                "channel", sourceName);
            throw new IllegalStateException(msg);
          }
          ChannelSelectorConfiguration selectorConfig =
              config.getSelectorConfiguration();

          ChannelSelector selector = ChannelSelectorFactory.create(
              sourceChannels, selectorConfig);

          ChannelProcessor channelProcessor = new ChannelProcessor(selector);
          Configurables.configure(channelProcessor, config);

          source.setChannelProcessor(channelProcessor);
          sourceRunnerMap.put(comp.getComponentName(),
              SourceRunner.forSource(source));
          // Record this source on each connected channel so the channel is not
          // pruned as unconnected later.
          for (Channel channel : sourceChannels) {
            ChannelComponent channelComponent =
                Preconditions.checkNotNull(channelComponentMap.get(channel.getName()),
                    String.format("Channel %s", channel.getName()));
            channelComponent.components.add(sourceName);
          }
        } catch (Exception e) {
          String msg = String.format("Source %s has been removed due to an " +
              "error during configuration", sourceName);
          LOGGER.error(msg, e);
        }
      }
    }
    /*
     * Components which DO NOT have a ComponentConfiguration object
     * and use only Context
     */
    Map<String, Context> sourceContexts = agentConf.getSourceContext();
    for (String sourceName : sourceNames) {
      Context context = sourceContexts.get(sourceName);
      if (context != null) {
        Source source =
            sourceFactory.create(sourceName,
                context.getString(BasicConfigurationConstants.CONFIG_TYPE));
        try {
          Configurables.configure(source, context);
          // Channel names are a whitespace-separated list in the context.
          String[] channelNames = context.getString(
              BasicConfigurationConstants.CONFIG_CHANNELS).split("\\s+");
          List<Channel> sourceChannels =
              getSourceChannels(channelComponentMap, source, Arrays.asList(channelNames));
          if (sourceChannels.isEmpty()) {
            String msg = String.format("Source %s is not connected to a " +
                "channel", sourceName);
            throw new IllegalStateException(msg);
          }
          Map<String, String> selectorConfig = context.getSubProperties(
              BasicConfigurationConstants.CONFIG_SOURCE_CHANNELSELECTOR_PREFIX);

          ChannelSelector selector = ChannelSelectorFactory.create(
              sourceChannels, selectorConfig);

          ChannelProcessor channelProcessor = new ChannelProcessor(selector);
          Configurables.configure(channelProcessor, context);

          source.setChannelProcessor(channelProcessor);
          sourceRunnerMap.put(sourceName,
              SourceRunner.forSource(source));

          // Record this source on each connected channel so the channel is not
          // pruned as unconnected later.
          for (Channel channel : sourceChannels) {
            ChannelComponent channelComponent =
                Preconditions.checkNotNull(channelComponentMap.get(channel.getName()),
                    String.format("Channel %s", channel.getName()));
            channelComponent.components.add(sourceName);
          }
        } catch (Exception e) {
          String msg = String.format("Source %s has been removed due to an " +
              "error during configuration", sourceName);
          LOGGER.error(msg, e);
        }
      }
    }
  }
private List<Channel> getSourceChannels(Map<String, ChannelComponent> channelComponentMap,
Source source, Collection<String> channelNames) throws InstantiationException {
List<Channel> sourceChannels = new ArrayList<Channel>();
for (String chName : channelNames) {
ChannelComponent channelComponent = channelComponentMap.get(chName);
if (channelComponent != null) {
checkSourceChannelCompatibility(source, channelComponent.channel);
sourceChannels.add(channelComponent.channel);
}
}
return sourceChannels;
}
private void checkSourceChannelCompatibility(Source source, Channel channel)
throws InstantiationException {
if (source instanceof BatchSizeSupported && channel instanceof TransactionCapacitySupported) {
long transCap = ((TransactionCapacitySupported) channel).getTransactionCapacity();
long batchSize = ((BatchSizeSupported) source).getBatchSize();
if (transCap < batchSize) {
String msg = String.format(
"Incompatible source and channel settings defined. " +
"source's batch size is greater than the channels transaction capacity. " +
"Source: %s, batch size = %d, channel %s, transaction capacity = %d",
source.getName(), batchSize,
channel.getName(), transCap);
throw new InstantiationException(msg);
}
}
}
private void checkSinkChannelCompatibility(Sink sink, Channel channel)
throws InstantiationException {
if (sink instanceof BatchSizeSupported && channel instanceof TransactionCapacitySupported) {
long transCap = ((TransactionCapacitySupported) channel).getTransactionCapacity();
long batchSize = ((BatchSizeSupported) sink).getBatchSize();
if (transCap < batchSize) {
String msg = String.format(
"Incompatible sink and channel settings defined. " +
"sink's batch size is greater than the channels transaction capacity. " +
"Sink: %s, batch size = %d, channel %s, transaction capacity = %d",
sink.getName(), batchSize,
channel.getName(), transCap);
throw new InstantiationException(msg);
}
}
}
  /**
   * Creates and configures the sinks defined for this agent and wires each
   * sink to its channel. Loading is best-effort: a sink that fails
   * configuration or references a missing channel is logged and dropped
   * rather than aborting the load. Surviving sinks are passed on to
   * {@link #loadSinkGroups} for grouping.
   *
   * @param agentConf the agent's parsed configuration.
   * @param channelComponentMap already-materialized channels, keyed by name.
   * @param sinkRunnerMap output map populated (via loadSinkGroups) with one
   *     SinkRunner per sink group or solo sink.
   * @throws InstantiationException if a sink group is misconfigured
   *     (propagated from loadSinkGroups).
   */
  private void loadSinks(AgentConfiguration agentConf,
      Map<String, ChannelComponent> channelComponentMap, Map<String, SinkRunner> sinkRunnerMap)
      throws InstantiationException {
    Set<String> sinkNames = agentConf.getSinkSet();
    Map<String, ComponentConfiguration> compMap =
        agentConf.getSinkConfigMap();
    Map<String, Sink> sinks = new HashMap<String, Sink>();
    /*
     * Components which have a ComponentConfiguration object
     */
    for (String sinkName : sinkNames) {
      ComponentConfiguration comp = compMap.get(sinkName);
      if (comp != null) {
        SinkConfiguration config = (SinkConfiguration) comp;
        Sink sink = sinkFactory.create(comp.getComponentName(), comp.getType());
        try {
          Configurables.configure(sink, config);
          ChannelComponent channelComponent = channelComponentMap.get(config.getChannel());
          if (channelComponent == null) {
            String msg = String.format("Sink %s is not connected to a " +
                "channel", sinkName);
            throw new IllegalStateException(msg);
          }
          // Reject sink/channel pairs whose batch size exceeds the channel's
          // transaction capacity before wiring them together.
          checkSinkChannelCompatibility(sink, channelComponent.channel);
          sink.setChannel(channelComponent.channel);
          sinks.put(comp.getComponentName(), sink);
          // Track which components use this channel so unused channels can be
          // identified elsewhere.
          channelComponent.components.add(sinkName);
        } catch (Exception e) {
          // Deliberate best-effort handling: drop the bad sink, keep loading.
          String msg = String.format("Sink %s has been removed due to an " +
              "error during configuration", sinkName);
          LOGGER.error(msg, e);
        }
      }
    }
    /*
     * Components which DO NOT have a ComponentConfiguration object
     * and use only Context
     */
    Map<String, Context> sinkContexts = agentConf.getSinkContext();
    for (String sinkName : sinkNames) {
      Context context = sinkContexts.get(sinkName);
      if (context != null) {
        Sink sink = sinkFactory.create(sinkName, context.getString(
            BasicConfigurationConstants.CONFIG_TYPE));
        try {
          Configurables.configure(sink, context);
          ChannelComponent channelComponent =
              channelComponentMap.get(
                  context.getString(BasicConfigurationConstants.CONFIG_CHANNEL));
          if (channelComponent == null) {
            String msg = String.format("Sink %s is not connected to a " +
                "channel", sinkName);
            throw new IllegalStateException(msg);
          }
          checkSinkChannelCompatibility(sink, channelComponent.channel);
          sink.setChannel(channelComponent.channel);
          sinks.put(sinkName, sink);
          channelComponent.components.add(sinkName);
        } catch (Exception e) {
          // Same best-effort policy as the ComponentConfiguration branch above.
          String msg = String.format("Sink %s has been removed due to an " +
              "error during configuration", sinkName);
          LOGGER.error(msg, e);
        }
      }
    }
    loadSinkGroups(agentConf, sinks, sinkRunnerMap);
  }
  /**
   * Organizes sinks into their configured sink groups and creates a
   * SinkRunner per group. Sinks claimed by a group are removed from
   * {@code sinks}; any sinks left over each get their own
   * DefaultSinkProcessor-backed solo runner.
   *
   * @param agentConf the agent's parsed configuration.
   * @param sinks configured sinks keyed by name; entries are consumed
   *     (removed) as groups claim them.
   * @param sinkRunnerMap output map receiving one SinkRunner per group or
   *     solo sink.
   * @throws InstantiationException if a group references a sink that does not
   *     exist or is already claimed by another group.
   */
  private void loadSinkGroups(AgentConfiguration agentConf,
      Map<String, Sink> sinks, Map<String, SinkRunner> sinkRunnerMap)
      throws InstantiationException {
    Set<String> sinkGroupNames = agentConf.getSinkgroupSet();
    Map<String, ComponentConfiguration> compMap =
        agentConf.getSinkGroupConfigMap();
    // Maps sink name -> name of the group that claimed it.
    Map<String, String> usedSinks = new HashMap<String, String>();
    for (String groupName: sinkGroupNames) {
      ComponentConfiguration comp = compMap.get(groupName);
      if (comp != null) {
        SinkGroupConfiguration groupConf = (SinkGroupConfiguration) comp;
        List<Sink> groupSinks = new ArrayList<Sink>();
        for (String sink : groupConf.getSinks()) {
          // remove() doubles as the "claim" operation: a sink taken by one
          // group cannot be used by another group or run solo afterwards.
          Sink s = sinks.remove(sink);
          if (s == null) {
            String sinkUser = usedSinks.get(sink);
            if (sinkUser != null) {
              throw new InstantiationException(String.format(
                  "Sink %s of group %s already " +
                  "in use by group %s", sink, groupName, sinkUser));
            } else {
              throw new InstantiationException(String.format(
                  "Sink %s of group %s does "
                  + "not exist or is not properly configured", sink,
                  groupName));
            }
          }
          groupSinks.add(s);
          usedSinks.put(sink, groupName);
        }
        try {
          SinkGroup group = new SinkGroup(groupSinks);
          Configurables.configure(group, groupConf);
          sinkRunnerMap.put(comp.getComponentName(),
              new SinkRunner(group.getProcessor()));
        } catch (Exception e) {
          // Best-effort: a misconfigured group is dropped, not fatal.
          String msg = String.format("SinkGroup %s has been removed due to " +
              "an error during configuration", groupName);
          LOGGER.error(msg, e);
        }
      }
    }
    // add any unassigned sinks to solo collectors
    for (Entry<String, Sink> entry : sinks.entrySet()) {
      // NOTE(review): usedSinks maps sink name -> group name, so
      // containsValue(sinkName) compares the sink name against *group* names;
      // containsKey was probably intended. In practice this is harmless
      // because grouped sinks were already removed from `sinks` above —
      // confirm before changing.
      if (!usedSinks.containsValue(entry.getKey())) {
        try {
          SinkProcessor pr = new DefaultSinkProcessor();
          List<Sink> sinkMap = new ArrayList<Sink>();
          sinkMap.add(entry.getValue());
          pr.setSinks(sinkMap);
          Configurables.configure(pr, new Context());
          sinkRunnerMap.put(entry.getKey(), new SinkRunner(pr));
        } catch (Exception e) {
          String msg = String.format("SinkGroup %s has been removed due to " +
              "an error during configuration", entry.getKey());
          LOGGER.error(msg, e);
        }
      }
    }
  }
private static class ChannelComponent {
final Channel channel;
final List<String> components;
ChannelComponent(Channel channel) {
this.channel = channel;
components = Lists.newArrayList();
}
}
protected Map<String, String> toMap(Properties properties) {
Map<String, String> result = Maps.newHashMap();
Enumeration<?> propertyNames = properties.propertyNames();
while (propertyNames.hasMoreElements()) {
String name = (String) propertyNames.nextElement();
String value = properties.getProperty(name);
result.put(name, value);
}
return result;
}
} | 9,588 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/SimpleMaterializedConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.node;
import java.util.HashMap;
import java.util.Map;
import org.apache.flume.Channel;
import org.apache.flume.SinkRunner;
import org.apache.flume.SourceRunner;
import com.google.common.collect.ImmutableMap;
/**
 * Straightforward map-backed implementation of
 * {@link MaterializedConfiguration}. Mutable through the {@code add*}
 * methods; each getter returns an immutable snapshot of the current state.
 */
public class SimpleMaterializedConfiguration implements MaterializedConfiguration {

  private final Map<String, Channel> channels;
  private final Map<String, SourceRunner> sourceRunners;
  private final Map<String, SinkRunner> sinkRunners;

  public SimpleMaterializedConfiguration() {
    channels = new HashMap<String, Channel>();
    sourceRunners = new HashMap<String, SourceRunner>();
    sinkRunners = new HashMap<String, SinkRunner>();
  }

  @Override
  public void addChannel(String name, Channel channel) {
    channels.put(name, channel);
  }

  @Override
  public void addSourceRunner(String name, SourceRunner sourceRunner) {
    sourceRunners.put(name, sourceRunner);
  }

  @Override
  public void addSinkRunner(String name, SinkRunner sinkRunner) {
    sinkRunners.put(name, sinkRunner);
  }

  @Override
  public Map<String, Channel> getChannels() {
    return ImmutableMap.copyOf(channels);
  }

  @Override
  public Map<String, SourceRunner> getSourceRunners() {
    return ImmutableMap.copyOf(sourceRunners);
  }

  @Override
  public Map<String, SinkRunner> getSinkRunners() {
    return ImmutableMap.copyOf(sinkRunners);
  }

  @Override
  public String toString() {
    StringBuilder text = new StringBuilder("{ sourceRunners:");
    text.append(sourceRunners)
        .append(" sinkRunners:")
        .append(sinkRunners)
        .append(" channels:")
        .append(channels)
        .append(" }");
    return text.toString();
  }
}
| 9,589 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/MaterializedConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.node;
import org.apache.flume.Channel;
import org.apache.flume.SinkRunner;
import org.apache.flume.SourceRunner;
import java.util.Map;
/**
* MaterializedConfiguration represents the materialization of a Flume
* properties file. That is it's the actual Source, Sink, and Channels
* represented in the configuration file.
*/
public interface MaterializedConfiguration {

  /** Registers the named source runner with this configuration. */
  void addSourceRunner(String name, SourceRunner sourceRunner);

  /** Registers the named sink runner with this configuration. */
  void addSinkRunner(String name, SinkRunner sinkRunner);

  /** Registers the named channel with this configuration. */
  void addChannel(String name, Channel channel);

  /** Returns the configured source runners, keyed by component name. */
  Map<String, SourceRunner> getSourceRunners();

  /** Returns the configured sink runners, keyed by component name. */
  Map<String, SinkRunner> getSinkRunners();

  /** Returns the configured channels, keyed by component name. */
  Map<String, Channel> getChannels();
}
| 9,590 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/UriConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.flume.CounterGroup;
import org.apache.flume.conf.ConfigurationException;
import org.apache.flume.conf.FlumeConfiguration;
import org.apache.flume.lifecycle.LifecycleAware;
import org.apache.flume.lifecycle.LifecycleState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.EventBus;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
* <p>
* A configuration provider that uses properties for specifying
* configuration. The configurations follow the Java properties file syntax
* rules specified at {@link Properties#load(java.io.Reader)}. Every
* configuration value specified in the properties is prefixed by an
* <em>Agent Name</em> which helps isolate an individual agent's namespace.
* </p>
* <p>
* Valid configurations must observe the following rules for every agent
* namespace.
* <ul>
* <li>For every <agent name> there must be three lists specified that
* include <tt><agent name>.sources</tt>,
* <tt><agent name>.sinks</tt>, and <tt><agent name>.channels</tt>.
* Each of these lists must contain a space separated list of names
* corresponding to that particular entity.</li>
* <li>For each source named in <tt><agent name>.sources</tt>, there must
* be a non-empty <tt>type</tt> attribute specified from the valid set of source
* types. For example:
* <tt><agent name>.sources.<source name>.type = event</tt></li>
* <li>For each source named in <tt><agent name>.sources</tt>, there must
* be a space-separated list of channel names that the source will associate
* with during runtime. Each of these names must be contained in the channels
* list specified by <tt><agent name>.channels</tt>. For example:
* <tt><agent name>.sources.<source name>.channels =
* <channel-1 name> <channel-2 name></tt></li>
* <li>For each source named in the <tt><agent name>.sources</tt>, there
* must be a <tt>runner</tt> namespace of configuration that configures the
* associated source runner. For example:
* <tt><agent name>.sources.<source name>.runner.type = avro</tt>.
* This namespace can also be used to configure other configuration of the
* source runner as needed. For example:
* <tt><agent name>.sources.<source name>.runner.port = 10101</tt>
* </li>
* <li>For each source named in <tt><sources>.sources</tt> there can
* be an optional <tt>selector.type</tt> specified that identifies the type
* of channel selector associated with the source. If not specified, the
* default replicating channel selector is used.
* </li><li>For each channel named in the <tt><agent name>.channels</tt>,
* there must be a non-empty <tt>type</tt> attribute specified from the valid
* set of channel types. For example:
* <tt><agent name>.channels.<channel name>.type = mem</tt></li>
* <li>For each sink named in the <tt><agent name>.sinks</tt>, there must
* be a non-empty <tt>type</tt> attribute specified from the valid set of sink
* types. For example:
* <tt><agent name>.sinks.<sink name>.type = hdfs</tt></li>
* <li>For each sink named in the <tt><agent name>.sinks</tt>, there must
* be a non-empty single-valued channel name specified as the value of the
* <tt>channel</tt> attribute. This value must be contained in the channels list
* specified by <tt><agent name>.channels</tt>. For example:
* <tt><agent name>.sinks.<sink name>.channel =
* <channel name></tt></li>
* <li>For each sink named in the <tt><agent name>.sinks</tt>, there must
* be a <tt>runner</tt> namespace of configuration that configures the
* associated sink runner. For example:
* <tt><agent name>.sinks.<sink name>.runner.type = polling</tt>.
* This namespace can also be used to configure other configuration of the sink
* runner as needed. For example:
* <tt><agent name>.sinks.<sink name>.runner.polling.interval =
* 60</tt></li>
* <li>A fourth optional list <tt><agent name>.sinkgroups</tt>
* may be added to each agent, consisting of unique space separated names
* for groups</li>
* <li>Each sinkgroup must specify sinks, containing a list of all sinks
* belonging to it. These cannot be shared by multiple groups.
* Further, one can set a processor and behavioral parameters to determine
* how sink selection is made via <tt><agent name>.sinkgroups.<
* group name<.processor</tt>. For further detail refer to individual processor
* documentation</li>
* <li>Sinks not assigned to a group will be assigned to default single sink
* groups.</li>
* </ul>
* <p>
* Apart from the above required configuration values, each source, sink or
* channel can have its own set of arbitrary configuration as required by the
* implementation. Each of these configuration values are expressed by fully
* namespace qualified configuration keys. For example, the configuration
* property called <tt>capacity</tt> for a channel called <tt>ch1</tt> for the
* agent named <tt>host1</tt> with value <tt>1000</tt> will be expressed as:
* <tt>host1.channels.ch1.capacity = 1000</tt>.
* </p>
* <p>
* Any information contained in the configuration file other than what pertains
* to the configured agents, sources, sinks and channels via the explicitly
* enumerated list of sources, sinks and channels per agent name are ignored by
* this provider. Moreover, if any of the required configuration values are not
* present in the configuration file for the configured entities, that entity
* and anything that depends upon it is considered invalid and consequently not
* configured. For example, if a channel is missing its <tt>type</tt> attribute,
* it is considered misconfigured. Also, any sources or sinks that depend upon
* this channel are also considered misconfigured and not initialized.
* </p>
* <p>
* Example configuration file:
*
* <pre>
* #
* # Flume Configuration
* # This file contains configuration for one Agent identified as host1.
* #
*
* host1.sources = avroSource thriftSource
* host1.channels = jdbcChannel
* host1.sinks = hdfsSink
*
* # avroSource configuration
* host1.sources.avroSource.type = org.apache.flume.source.AvroSource
* host1.sources.avroSource.runner.type = avro
* host1.sources.avroSource.runner.port = 11001
* host1.sources.avroSource.channels = jdbcChannel
* host1.sources.avroSource.selector.type = replicating
*
* # thriftSource configuration
* host1.sources.thriftSource.type = org.apache.flume.source.ThriftSource
* host1.sources.thriftSource.runner.type = thrift
* host1.sources.thriftSource.runner.port = 12001
* host1.sources.thriftSource.channels = jdbcChannel
*
* # jdbcChannel configuration
* host1.channels.jdbcChannel.type = jdbc
* host1.channels.jdbcChannel.jdbc.driver = com.mysql.jdbc.Driver
* host1.channels.jdbcChannel.jdbc.connect.url = http://localhost/flumedb
* host1.channels.jdbcChannel.jdbc.username = flume
* host1.channels.jdbcChannel.jdbc.password = flume
*
* # hdfsSink configuration
* host1.sinks.hdfsSink.type = hdfs
* host1.sinks.hdfsSink.hdfs.path = hdfs://localhost/
* host1.sinks.hdfsSink.batchsize = 1000
* host1.sinks.hdfsSink.runner.type = polling
* host1.sinks.hdfsSink.runner.polling.interval = 60
* </pre>
*
* </p>
*
* @see Properties#load(java.io.Reader)
*/
/**
 * Configuration provider that reads properties from one or more configuration
 * URIs, optionally polls them for changes, and optionally keeps a local backup
 * copy of the last good configuration for use when the primary sources are
 * unreachable.
 */
public class UriConfigurationProvider extends AbstractConfigurationProvider
    implements LifecycleAware {

  private static final Logger LOGGER = LoggerFactory.getLogger(UriConfigurationProvider.class);

  private final List<ConfigurationSource> configurationSources;
  private final File backupDirectory;
  private final EventBus eventBus;
  private final int interval;
  private final CounterGroup counterGroup;
  private LifecycleState lifecycleState = LifecycleState.IDLE;

  private ScheduledExecutorService executorService;

  /**
   * @param agentName name of the agent whose configuration is provided.
   * @param sourceList configuration sources to read, in order.
   * @param backupDirectory directory for backup copies of the resolved
   *     configuration, or null to disable backups.
   * @param eventBus bus on which reloaded configurations are published, or
   *     null to disable polling.
   * @param pollInterval polling interval in seconds; polling is disabled when
   *     it is not positive.
   */
  public UriConfigurationProvider(String agentName, List<ConfigurationSource> sourceList,
      String backupDirectory, EventBus eventBus, int pollInterval) {
    super(agentName);
    this.configurationSources = sourceList;
    this.backupDirectory = backupDirectory != null ? new File(backupDirectory) : null;
    this.eventBus = eventBus;
    this.interval = pollInterval;
    counterGroup = new CounterGroup();
  }

  @Override
  public void start() {
    // Polling is only scheduled when there is both a bus to publish changes
    // on and a positive interval to poll with.
    if (eventBus != null && interval > 0) {
      executorService = Executors.newSingleThreadScheduledExecutor(
          new ThreadFactoryBuilder().setNameFormat("conf-file-poller-%d")
              .build());
      WatcherRunnable watcherRunnable = new WatcherRunnable(configurationSources, counterGroup,
          eventBus);
      executorService.scheduleWithFixedDelay(watcherRunnable, 0, interval,
          TimeUnit.SECONDS);
    }
    lifecycleState = LifecycleState.START;
  }

  @Override
  public void stop() {
    if (executorService != null) {
      executorService.shutdown();
      try {
        // Give the poller a short grace period, then force shutdown and wait.
        if (!executorService.awaitTermination(500, TimeUnit.MILLISECONDS)) {
          LOGGER.debug("File watcher has not terminated. Forcing shutdown of executor.");
          executorService.shutdownNow();
          while (!executorService.awaitTermination(500, TimeUnit.MILLISECONDS)) {
            LOGGER.debug("Waiting for file watcher to terminate");
          }
        }
      } catch (InterruptedException e) {
        LOGGER.debug("Interrupted while waiting for file watcher to terminate");
        Thread.currentThread().interrupt();
      }
    }
    lifecycleState = LifecycleState.STOP;
  }

  @Override
  public LifecycleState getLifecycleState() {
    return lifecycleState;
  }

  protected List<ConfigurationSource> getConfigurationSources() {
    return configurationSources;
  }

  /**
   * Loads properties from every configured source, resolves them into a
   * configuration map, and returns the resulting FlumeConfiguration. If the
   * primary sources yield nothing, falls back to the backup file (when
   * enabled). Returns null when no configuration can be found at all.
   */
  @Override
  public FlumeConfiguration getFlumeConfiguration() {
    Map<String, String> configMap = null;
    Properties properties = new Properties();
    for (ConfigurationSource configurationSource : configurationSources) {
      try (InputStream is = configurationSource.getInputStream()) {
        if (is != null) {
          switch (configurationSource.getExtension()) {
            // Only the properties format is currently supported.
            case ConfigurationSource.JSON: case ConfigurationSource.YAML:
            case ConfigurationSource.XML: {
              LOGGER.warn("File extension type {} is unsupported",
                  configurationSource.getExtension());
              break;
            }
            default: {
              properties.load(is);
              break;
            }
          }
        }
      } catch (IOException ioe) {
        LOGGER.warn("Unable to load properties from {}: {}", configurationSource.getUri(),
            ioe.getMessage());
      }
      if (properties.size() > 0) {
        configMap = MapResolver.resolveProperties(properties);
      }
    }

    if (configMap != null) {
      // Persist the resolved configuration so it can serve as a fallback the
      // next time the primary sources are unavailable.
      Properties props = new Properties();
      props.putAll(configMap);
      if (backupDirectory != null) {
        if (backupDirectory.mkdirs()) {
          // This is only being logged to keep Spotbugs happy. We can't ignore the result of mkdirs.
          LOGGER.debug("Created directories for {}", backupDirectory.toString());
        }
        File backupFile = getBackupFile(backupDirectory, getAgentName());
        try (OutputStream os = new FileOutputStream(backupFile)) {
          props.store(os, "Backup created at " + LocalDateTime.now().toString());
        } catch (IOException ioe) {
          // Fixed: the message was previously string-concatenated with the
          // exception text, leaving the {} placeholder unsubstituted.
          LOGGER.warn("Unable to create backup properties file: {}", ioe.getMessage());
        }
      }
    } else {
      if (backupDirectory != null) {
        File backup = getBackupFile(backupDirectory, getAgentName());
        if (backup.exists()) {
          Properties props = new Properties();
          try (InputStream is = new FileInputStream(backup)) {
            LOGGER.warn("Unable to access primary configuration. Trying backup");
            props.load(is);
            configMap = MapResolver.resolveProperties(props);
          } catch (IOException ex) {
            LOGGER.warn("Error reading backup file: {}", ex.getMessage());
          }
        }
      }
    }

    if (configMap != null) {
      return new FlumeConfiguration(configMap);
    } else {
      LOGGER.error("No configuration could be found");
      return null;
    }
  }

  /** Builds the hidden backup file path for the agent, or null when backups are disabled. */
  private File getBackupFile(File backupDirectory, String agentName) {
    if (backupDirectory != null) {
      return new File(backupDirectory, "." + agentName + ".properties");
    }
    return null;
  }

  /** Periodic task that checks each source for modification and republishes the configuration. */
  private class WatcherRunnable implements Runnable {

    private final List<ConfigurationSource> configurationSources;
    private final CounterGroup counterGroup;
    private final EventBus eventBus;

    public WatcherRunnable(List<ConfigurationSource> sources, CounterGroup counterGroup,
        EventBus eventBus) {
      this.configurationSources = sources;
      this.counterGroup = counterGroup;
      this.eventBus = eventBus;
    }

    @Override
    public void run() {
      LOGGER.debug("Checking for changes to sources");
      counterGroup.incrementAndGet("uri.checks");
      try {
        // Check every source (not just until the first hit) so each source's
        // modification state is refreshed on every poll.
        boolean isModified = false;
        for (ConfigurationSource source : configurationSources) {
          if (source.isModified()) {
            isModified = true;
          }
        }
        if (isModified) {
          eventBus.post(getConfiguration());
        }
      } catch (ConfigurationException ex) {
        LOGGER.warn("Unable to update configuration: {}", ex.getMessage());
      }
    }
  }
}
| 9,591 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/ConfigurationSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.flume.node;
import java.io.InputStream;
/**
* Interface for retrieving configuration data.
*/
/**
 * Interface for retrieving configuration data.
 */
public interface ConfigurationSource {

  // Recognized configuration "file" extensions. Interface fields are
  // implicitly public static final, so the previously redundant modifiers
  // are omitted.
  String PROPERTIES = "properties";
  String JSON = "json";
  String YAML = "yaml";
  String XML = "xml";

  /**
   * Returns the InputStream if it hasn't already been processed.
   * @return The InputStream or null.
   */
  InputStream getInputStream();

  /**
   * Returns the URI string.
   * @return The string URI.
   */
  String getUri();

  /**
   * Determine if the configuration data source has been modified since it was last checked.
   * @return true if the data was modified.
   */
  default boolean isModified() {
    return false;
  }

  /**
   * Return the "file" extension for the specified uri.
   * @return The file extension.
   */
  String getExtension();
}
| 9,592 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/PollingZooKeeperConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.IOException;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.ChildData;
import org.apache.curator.framework.recipes.cache.NodeCache;
import org.apache.curator.framework.recipes.cache.NodeCacheListener;
import org.apache.flume.FlumeException;
import org.apache.flume.conf.FlumeConfiguration;
import org.apache.flume.lifecycle.LifecycleAware;
import org.apache.flume.lifecycle.LifecycleState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.EventBus;
/**
 * Configuration provider that watches this agent's ZooKeeper node via a
 * Curator NodeCache and republishes the parsed configuration on the supplied
 * EventBus whenever the node changes.
 */
public class PollingZooKeeperConfigurationProvider extends
    AbstractZooKeeperConfigurationProvider implements LifecycleAware {

  private static final Logger LOGGER = LoggerFactory
      .getLogger(PollingZooKeeperConfigurationProvider.class);

  private final EventBus eventBus;

  private final CuratorFramework client;

  // Watches the agent's znode; created in start() and closed in stop().
  private NodeCache agentNodeCache;

  // Most recently parsed configuration; null until the first refresh.
  private FlumeConfiguration flumeConfiguration;

  private LifecycleState lifecycleState;

  public PollingZooKeeperConfigurationProvider(String agentName,
      String zkConnString, String basePath, EventBus eventBus) {
    super(agentName, zkConnString, basePath);
    this.eventBus = eventBus;
    client = createClient();
    agentNodeCache = null;
    flumeConfiguration = null;
    lifecycleState = LifecycleState.IDLE;
  }

  @Override
  protected FlumeConfiguration getFlumeConfiguration() {
    return flumeConfiguration;
  }

  /**
   * Starts the Curator client and registers a NodeCache listener that
   * refreshes the configuration on every znode change. On failure the
   * lifecycle state is set to ERROR and the exception is rethrown
   * (wrapped in FlumeException unless it is already a RuntimeException).
   */
  @Override
  public void start() {
    LOGGER.debug("Starting...");
    try {
      client.start();
      try {
        agentNodeCache = new NodeCache(client, basePath + "/" + getAgentName());
        agentNodeCache.start();
        agentNodeCache.getListenable().addListener(new NodeCacheListener() {
          @Override
          public void nodeChanged() throws Exception {
            refreshConfiguration();
          }
        });
      } catch (Exception e) {
        // Don't leak a started client if the node cache could not be set up.
        client.close();
        throw e;
      }
    } catch (Exception e) {
      lifecycleState = LifecycleState.ERROR;
      if (e instanceof RuntimeException) {
        throw (RuntimeException) e;
      } else {
        throw new FlumeException(e);
      }
    }
    lifecycleState = LifecycleState.START;
  }

  /**
   * Re-reads the agent's znode (null data is tolerated), parses it, and
   * publishes the resulting configuration on the event bus.
   */
  private void refreshConfiguration() throws IOException {
    LOGGER.info("Refreshing configuration from ZooKeeper");
    byte[] data = null;
    ChildData childData = agentNodeCache.getCurrentData();
    if (childData != null) {
      data = childData.getData();
    }
    flumeConfiguration = configFromBytes(data);
    eventBus.post(getConfiguration());
  }

  /**
   * Closes the node cache and the Curator client. Failures are logged and
   * recorded as ERROR state rather than thrown; STOP is only set when both
   * closes succeed.
   */
  @Override
  public void stop() {
    LOGGER.debug("Stopping...");
    if (agentNodeCache != null) {
      try {
        agentNodeCache.close();
      } catch (IOException e) {
        LOGGER.warn("Encountered exception while stopping", e);
        lifecycleState = LifecycleState.ERROR;
      }
    }
    try {
      client.close();
    } catch (Exception e) {
      LOGGER.warn("Error stopping Curator client", e);
      lifecycleState = LifecycleState.ERROR;
    }
    if (lifecycleState != LifecycleState.ERROR) {
      lifecycleState = LifecycleState.STOP;
    }
  }

  @Override
  public LifecycleState getLifecycleState() {
    return lifecycleState;
  }
}
| 9,593 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/StaticZooKeeperConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import org.apache.curator.framework.CuratorFramework;
import org.apache.flume.FlumeException;
import org.apache.flume.conf.FlumeConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Configuration provider that performs a one-shot read of this agent's
 * configuration from ZooKeeper. No watching or polling is involved; each call
 * to {@link #getFlumeConfiguration()} opens a fresh Curator client, reads the
 * agent's znode, and closes the client again.
 */
public class StaticZooKeeperConfigurationProvider extends
    AbstractZooKeeperConfigurationProvider {

  private static final Logger LOGGER = LoggerFactory
      .getLogger(StaticZooKeeperConfigurationProvider.class);

  public StaticZooKeeperConfigurationProvider(String agentName,
      String zkConnString, String basePath) {
    super(agentName, zkConnString, basePath);
  }

  @Override
  protected FlumeConfiguration getFlumeConfiguration() {
    try {
      CuratorFramework client = createClient();
      client.start();
      try {
        String nodePath = basePath + "/" + getAgentName();
        byte[] data = client.getData().forPath(nodePath);
        return configFromBytes(data);
      } finally {
        client.close();
      }
    } catch (Exception e) {
      LOGGER.error("Error getting configuration info from Zookeeper", e);
      throw new FlumeException(e);
    }
  }
}
| 9,594 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/Initializable.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.flume.node;
/**
 * An interface implemented by components that need access to the materialized
 * configuration after all components have been created but before any of them
 * have been started.
 */
public interface Initializable {

  /**
   * Called to initialize the component.
   * @param configuration the materialized configuration.
   */
  void initialize(MaterializedConfiguration configuration);
}
| 9,595 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/HttpConfigurationSource.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import org.apache.commons.io.IOUtils;
import org.apache.flume.CounterGroup;
import org.apache.flume.conf.ConfigurationException;
import org.apache.flume.node.net.AuthorizationProvider;
import org.apache.flume.node.net.UrlConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A {@link ConfigurationSource} that retrieves the configuration document over
 * HTTP(S). The payload is cached in memory; {@link #isModified()} re-fetches it
 * using a conditional request (If-Modified-Since) so an unchanged document
 * costs only a 304 round trip.
 */
public class HttpConfigurationSource implements ConfigurationSource {

  private static final Logger LOGGER = LoggerFactory.getLogger(HttpConfigurationSource.class);
  private static final int NOT_MODIFIED = 304;
  private static final int NOT_AUTHORIZED = 401;
  private static final int NOT_FOUND = 404;
  private static final int OK = 200;
  private static final int BUF_SIZE = 1024;

  private final URI uri;
  private final CounterGroup counterGroup;
  private final AuthorizationProvider authorizationProvider;
  private final boolean verifyHost;
  // Last-Modified value from the most recent successful fetch; passed back to
  // the server so it can reply 304 Not Modified.
  private long lastModified = 0;
  // Cached configuration payload from the most recent successful fetch.
  private byte[] data = null;

  /**
   * Fetches the configuration eagerly so an unreachable URL fails fast.
   *
   * @param uri location of the configuration document.
   * @param authorizationProvider supplies credentials for the request.
   * @param verifyHost whether to verify the TLS host name.
   * @throws ConfigurationException if the document cannot be retrieved.
   */
  public HttpConfigurationSource(URI uri, AuthorizationProvider authorizationProvider,
      boolean verifyHost) {
    this.authorizationProvider = authorizationProvider;
    this.uri = uri;
    this.verifyHost = verifyHost;
    counterGroup = new CounterGroup();
    readInputStream();
  }

  @Override
  public InputStream getInputStream() {
    return new ByteArrayInputStream(data);
  }

  @Override
  public String getUri() {
    return this.uri.toString();
  }

  /**
   * Returns the file extension of the URI path, used to select the
   * configuration parser; defaults to {@code properties} when absent.
   */
  @Override
  public String getExtension() {
    // Use the LAST dot so "flume.conf.properties" yields "properties";
    // the previous indexOf() returned everything after the FIRST dot.
    int idx = uri.getPath().lastIndexOf('.');
    if (idx <= 1) {
      return PROPERTIES;
    }
    return uri.getPath().substring(idx + 1);
  }

  /**
   * Polls the remote document.
   *
   * @return true if the document changed and was reloaded, false otherwise.
   */
  @Override
  public boolean isModified() {
    LOGGER.debug("Checking {} for changes", uri);
    counterGroup.incrementAndGet("uri.checks");
    try {
      LOGGER.info("Reloading configuration from:{}", uri);
      if (readInputStream()) {
        counterGroup.incrementAndGet("uri.loads");
        return true;
      }
    } catch (ConfigurationException ex) {
      // Fixed malformed format string ("due to {}: " left a dangling colon).
      LOGGER.error("Unable to access configuration: {}", ex.getMessage());
    }
    return false;
  }

  /**
   * Performs the conditional HTTP fetch, updating {@link #data} and
   * {@link #lastModified} on a 200 response.
   *
   * @return true if new content was downloaded, false on 304 or an
   *     unexpected-but-tolerated response code.
   * @throws ConfigurationException on 404, 401, or an I/O failure.
   */
  private boolean readInputStream() {
    try {
      HttpURLConnection connection = UrlConnectionFactory.createConnection(uri.toURL(),
          authorizationProvider, lastModified, verifyHost);
      connection.connect();
      try {
        int code = connection.getResponseCode();
        switch (code) {
          case NOT_MODIFIED: {
            LOGGER.debug("Configuration Not Modified");
            return false;
          }
          case OK: {
            try (InputStream is = connection.getInputStream()) {
              lastModified = connection.getLastModified();
              LOGGER.debug("Content was modified for {}. lastModified: {}", uri.toString(),
                  lastModified);
              data = IOUtils.toByteArray(is);
              return true;
            } catch (final IOException e) {
              try (InputStream es = connection.getErrorStream()) {
                // getErrorStream() may legitimately return null; guard the
                // read to avoid masking the original failure with an NPE.
                if (es != null) {
                  LOGGER.info("Error accessing configuration at {}: {}", uri, readStream(es));
                } else {
                  LOGGER.info("Error accessing configuration at {}: {}", uri, e.getMessage());
                }
              } catch (final IOException ioe) {
                LOGGER.error("Error accessing configuration at {}: {}", uri, e.getMessage());
              }
              throw new ConfigurationException("Unable to access " + uri.toString(), e);
            }
          }
          case NOT_FOUND: {
            throw new ConfigurationException("Unable to locate " + uri.toString());
          }
          case NOT_AUTHORIZED: {
            throw new ConfigurationException("Authorization failed");
          }
          default: {
            if (code < 0) {
              LOGGER.info("Invalid response code returned");
            } else {
              LOGGER.info("Unexpected response code returned {}", code);
            }
            return false;
          }
        }
      } finally {
        connection.disconnect();
      }
    } catch (IOException e) {
      LOGGER.warn("Error accessing {}: {}", uri.toString(), e.getMessage());
      throw new ConfigurationException("Unable to access " + uri.toString(), e);
    }
  }

  // Drains an InputStream fully into a byte array (used for error bodies).
  private byte[] readStream(InputStream is) throws IOException {
    ByteArrayOutputStream result = new ByteArrayOutputStream();
    byte[] buffer = new byte[BUF_SIZE];
    int length;
    while ((length = is.read(buffer)) != -1) {
      result.write(buffer, 0, length);
    }
    return result.toByteArray();
  }

  @Override
  public String toString() {
    return "{ uri:" + uri + "}";
  }
}
| 9,596 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/PollingPropertiesFileConfigurationProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.File;
import com.google.common.collect.Lists;
import com.google.common.eventbus.EventBus;
/**
 * Legacy provider that loads the agent configuration from a local properties
 * file via a {@link FileConfigurationSource}, polling it for changes at the
 * given interval. Retained for backward compatibility.
 *
 * @deprecated Use UriConfigurationProvider instead.
 */
@Deprecated
public class PollingPropertiesFileConfigurationProvider extends UriConfigurationProvider {

  /**
   * @param agentName name of the agent whose configuration is loaded.
   * @param file local properties file to read and poll.
   * @param eventBus bus passed through to the superclass.
   * @param interval polling interval (units defined by UriConfigurationProvider
   *     — TODO confirm seconds vs. millis against the superclass).
   */
  public PollingPropertiesFileConfigurationProvider(String agentName, File file, EventBus eventBus,
      int interval) {
    super(agentName, Lists.newArrayList(new FileConfigurationSource(file.toURI())), null,
        eventBus, interval);
  }
}
| 9,597 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/ConfigurationProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.node;
/**
 * Supplies a {@link MaterializedConfiguration} for a Flume agent.
 */
public interface ConfigurationProvider {

  /**
   * Builds and returns the current materialized configuration.
   * @return the materialized configuration.
   */
  MaterializedConfiguration getConfiguration();
}
| 9,598 |
0 | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-node/src/main/java/org/apache/flume/node/FileConfigurationSource.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.node;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.flume.CounterGroup;
import org.apache.flume.conf.ConfigurationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A {@link ConfigurationSource} backed by a local file. The content is cached
 * in memory; {@link #isModified()} re-reads the file when its last-modified
 * timestamp advances.
 */
public class FileConfigurationSource implements ConfigurationSource {

  private static final Logger LOGGER = LoggerFactory.getLogger(FileConfigurationSource.class);

  private final Path path;
  private final URI uri;
  private final CounterGroup counterGroup;
  // Cached file content from the most recent successful read.
  private byte[] data;
  // lastModified timestamp at the most recent successful read.
  private long lastChange;

  /**
   * Reads the file eagerly so a broken path fails fast.
   *
   * @param uri location of the configuration file; must be a file: URI.
   * @throws ConfigurationException if the file cannot be read.
   */
  public FileConfigurationSource(URI uri) {
    this.uri = uri;
    this.path = Paths.get(uri);
    counterGroup = new CounterGroup();
    try {
      this.lastChange = path.toFile().lastModified();
      data = Files.readAllBytes(this.path);
    } catch (IOException ioe) {
      LOGGER.error("Unable to read {}: {}", path.toString(), ioe.getMessage());
      throw new ConfigurationException("Unable to read file " + path.toString(), ioe);
    }
  }

  @Override
  public InputStream getInputStream() {
    return new ByteArrayInputStream(data);
  }

  @Override
  public String getUri() {
    return this.uri.toString();
  }

  /**
   * Returns the file extension of the URI path, used to select the
   * configuration parser; defaults to {@code properties} when absent.
   */
  @Override
  public String getExtension() {
    // Use the LAST dot so "flume.conf.properties" yields "properties";
    // the previous indexOf() returned everything after the FIRST dot.
    int idx = uri.getPath().lastIndexOf('.');
    if (idx <= 1) {
      return PROPERTIES;
    }
    return uri.getPath().substring(idx + 1);
  }

  /**
   * Polls the file's last-modified timestamp and reloads the content when it
   * has advanced.
   *
   * @return true if the file changed and was reloaded, false otherwise.
   */
  @Override
  public boolean isModified() {
    LOGGER.debug("Checking file:{} for changes", path.toString());
    counterGroup.incrementAndGet("file.checks");
    long lastModified = path.toFile().lastModified();
    if (lastModified > lastChange) {
      LOGGER.info("Reloading configuration file:{}", path.toString());
      counterGroup.incrementAndGet("file.loads");
      try {
        data = Files.readAllBytes(path);
        // Advance the timestamp only after a successful read so a transient
        // failure is retried on the next poll (previously the timestamp was
        // updated first and the new content was silently skipped).
        lastChange = lastModified;
        return true;
      } catch (Exception e) {
        LOGGER.error("Failed to load configuration data. Exception follows.", e);
      } catch (NoClassDefFoundError e) {
        LOGGER.error("Failed to start agent because dependencies were not found in classpath."
            + "Error follows.", e);
      } catch (Throwable t) {
        // caught because the caller does not handle or log Throwables
        LOGGER.error("Unhandled error", t);
      }
    }
    return false;
  }

  @Override
  public String toString() {
    return "{ file:" + path.toString() + "}";
  }
}
| 9,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.