index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ProjectRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.RegistryException;
import org.apache.airavata.registry.cpi.utils.Constants;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
public class ProjectRepositoryTest extends TestBase {

    private static final Logger logger = LoggerFactory.getLogger(ProjectRepositoryTest.class);

    private String testGateway = "testGateway";
    GatewayRepository gatewayRepository;
    ProjectRepository projectRepository;

    public ProjectRepositoryTest() {
        super(Database.EXP_CATALOG);
        gatewayRepository = new GatewayRepository();
        projectRepository = new ProjectRepository();
    }

    /**
     * Exercises the full lifecycle of {@link ProjectRepository}: add a project,
     * update it (with an unset project ID, simulating clients that rely on the
     * explicit projectId argument), retrieve it, search for it, and remove it.
     */
    @Test
    public void ProjectRepositoryTest() throws RegistryException {
        Gateway gateway = new Gateway();
        gateway.setGatewayId(testGateway);
        gateway.setDomain("SEAGRID");
        gateway.setEmailAddress("abc@d.com");
        String gatewayId = gatewayRepository.addGateway(gateway);

        Project project = new Project();
        project.setName("projectName");
        project.setOwner("user");
        project.setGatewayId(gatewayId);
        String projectId = projectRepository.addProject(project, gatewayId);
        // assertNotNull reports a clearer failure than assertTrue(x != null)
        assertNotNull(projectId);

        Project updatedProject = project.deepCopy();
        // Simulate clients that may or may not set projectId but will pass
        // projectId as an argument to updateProject
        updatedProject.unsetProjectID();
        updatedProject.setName("updated projectName");
        updatedProject.setDescription("projectDescription");
        projectRepository.updateProject(updatedProject, projectId);

        Project retrievedProject = projectRepository.getProject(projectId);
        assertEquals(gatewayId, retrievedProject.getGatewayId());
        assertEquals("updated projectName", retrievedProject.getName());
        assertEquals("projectDescription", retrievedProject.getDescription());
        assertTrue(projectRepository.getProjectIDs(Constants.FieldConstants.ProjectConstants.OWNER, "user").contains(projectId));

        // A search constrained by every filter should match exactly this project.
        List<String> accessibleProjectIds = new ArrayList<>();
        accessibleProjectIds.add(projectId);
        Map<String, String> filters = new HashMap<>();
        filters.put(Constants.FieldConstants.ProjectConstants.GATEWAY_ID, retrievedProject.getGatewayId());
        filters.put(Constants.FieldConstants.ProjectConstants.OWNER, retrievedProject.getOwner());
        filters.put(Constants.FieldConstants.ProjectConstants.PROJECT_NAME, retrievedProject.getName());
        filters.put(Constants.FieldConstants.ProjectConstants.DESCRIPTION, retrievedProject.getDescription());
        // assertEquals shows the actual size on failure, unlike assertTrue(size() == 1)
        assertEquals(1, projectRepository.searchAllAccessibleProjects(accessibleProjectIds, filters,
                -1, 0, null, null).size());

        projectRepository.removeProject(projectId);
        assertFalse(projectRepository.isProjectExist(projectId));
        gatewayRepository.removeGateway(gatewayId);
    }
}
| 800 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/UserRepositoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.user.UserProfile;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.registry.core.entities.expcatalog.UserPK;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import static org.junit.Assert.assertEquals;
public class UserRepositoryTest extends TestBase {

    private static final Logger logger = LoggerFactory.getLogger(UserRepositoryTest.class);

    GatewayRepository gatewayRepository;
    UserRepository userRepository;
    private String gatewayId;
    private String gatewayId2;

    public UserRepositoryTest() {
        super(Database.EXP_CATALOG);
        gatewayRepository = new GatewayRepository();
        userRepository = new UserRepository();
    }

    /** Builds a UserProfile whose internal id follows the "user@gateway" convention. */
    private UserProfile buildUserProfile(String username, String gateway) {
        UserProfile profile = new UserProfile();
        profile.setUserId(username);
        profile.setAiravataInternalUserId(username + "@" + gateway);
        profile.setGatewayId(gateway);
        return profile;
    }

    /** Creates the two gateways every test relies on. */
    @Before
    public void createTestData() throws RegistryException {
        Gateway firstGateway = new Gateway();
        firstGateway.setGatewayId("gateway");
        gatewayId = gatewayRepository.addGateway(firstGateway);

        Gateway secondGateway = new Gateway();
        secondGateway.setGatewayId("gateway2");
        gatewayId2 = gatewayRepository.addGateway(secondGateway);
    }

    /** Removes the gateways created in {@link #createTestData()}. */
    @After
    public void deleteTestData() throws RegistryException {
        gatewayRepository.removeGateway(gatewayId);
        gatewayRepository.removeGateway(gatewayId2);
    }

    /** Round-trips a single user through add, get, and delete. */
    @Test
    public void test() throws RegistryException {
        userRepository.addUser(buildUserProfile("username", gatewayId));

        UserProfile retrievedUserProfile = userRepository.get(new UserPK(gatewayId, "username"));
        assertEquals("username", retrievedUserProfile.getUserId());
        assertEquals("username@" + gatewayId, retrievedUserProfile.getAiravataInternalUserId());
        assertEquals(gatewayId, retrievedUserProfile.getGatewayId());

        userRepository.delete(new UserPK(gatewayId, "username"));
    }

    /** Username listing must be scoped per gateway: two users in the first, one in the second. */
    @Test
    public void testGetAllUsernamesInGateway() throws RegistryException {
        String username1 = "username1";
        String username2 = "username2";
        String username3 = "username3";
        userRepository.addUser(buildUserProfile(username1, gatewayId));
        userRepository.addUser(buildUserProfile(username2, gatewayId));
        userRepository.addUser(buildUserProfile(username3, gatewayId2));

        List<String> gateway1Usernames = userRepository.getAllUsernamesInGateway(gatewayId);
        assertEquals(2, gateway1Usernames.size());
        assertEquals(new HashSet<>(Arrays.asList(username1, username2)), new HashSet<>(gateway1Usernames));

        List<String> gateway2Usernames = userRepository.getAllUsernamesInGateway(gatewayId2);
        assertEquals(1, gateway2Usernames.size());
        assertEquals(Collections.singleton(username3), new HashSet<>(gateway2Usernames));

        userRepository.delete(new UserPK(gatewayId, username1));
        userRepository.delete(new UserPK(gatewayId, username2));
        userRepository.delete(new UserPK(gatewayId2, username3));
    }
}
| 801 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentInputRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.application.io.DataType;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.experiment.ExperimentType;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class ExperimentInputRepositoryTest extends TestBase {

    private static final Logger logger = LoggerFactory.getLogger(ExperimentInputRepositoryTest.class);

    GatewayRepository gatewayRepository;
    ProjectRepository projectRepository;
    ExperimentRepository experimentRepository;
    ExperimentInputRepository experimentInputRepository;

    public ExperimentInputRepositoryTest() {
        super(Database.EXP_CATALOG);
        gatewayRepository = new GatewayRepository();
        projectRepository = new ProjectRepository();
        experimentRepository = new ExperimentRepository();
        experimentInputRepository = new ExperimentInputRepository();
    }

    /**
     * Verifies that experiment inputs can be added to an experiment, updated
     * with a value, and read back with type and value intact.
     */
    @Test
    public void ExperimentInputRepositoryTest() throws RegistryException {
        Gateway gateway = new Gateway();
        gateway.setGatewayId("gateway");
        gateway.setDomain("SEAGRID");
        gateway.setEmailAddress("abc@d.com");
        String gatewayId = gatewayRepository.addGateway(gateway);

        Project project = new Project();
        project.setName("projectName");
        project.setOwner("user");
        project.setGatewayId(gatewayId);
        String projectId = projectRepository.addProject(project, gatewayId);

        ExperimentModel experimentModel = new ExperimentModel();
        experimentModel.setProjectId(projectId);
        experimentModel.setGatewayId(gatewayId);
        experimentModel.setExperimentType(ExperimentType.SINGLE_APPLICATION);
        experimentModel.setUserName("user");
        experimentModel.setExperimentName("name");
        String experimentId = experimentRepository.addExperiment(experimentModel);
        assertTrue(experimentId != null);

        InputDataObjectType inputDataObjectTypeExp = new InputDataObjectType();
        inputDataObjectTypeExp.setName("inputE");
        inputDataObjectTypeExp.setType(DataType.STRING);
        List<InputDataObjectType> inputDataObjectTypeExpList = new ArrayList<>();
        inputDataObjectTypeExpList.add(inputDataObjectTypeExp);
        // addExperimentInputs is expected to echo back the owning experiment id
        assertEquals(experimentId, experimentInputRepository.addExperimentInputs(inputDataObjectTypeExpList, experimentId));
        // assertEquals reports the actual size on failure, unlike assertTrue(size() == 1)
        assertEquals(1, experimentRepository.getExperiment(experimentId).getExperimentInputs().size());

        inputDataObjectTypeExp.setValue("iValueE");
        experimentInputRepository.updateExperimentInputs(inputDataObjectTypeExpList, experimentId);
        List<InputDataObjectType> retrievedExpInputsList = experimentInputRepository.getExperimentInputs(experimentId);
        assertEquals(1, retrievedExpInputsList.size());
        assertEquals("iValueE", retrievedExpInputsList.get(0).getValue());
        assertEquals(DataType.STRING, retrievedExpInputsList.get(0).getType());

        experimentRepository.removeExperiment(experimentId);
        // NOTE(review): the gateway is removed before the project that references
        // it — presumably cascades make this safe; confirm ordering is intentional.
        gatewayRepository.removeGateway(gatewayId);
        projectRepository.removeProject(projectId);
    }
}
| 802 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ProcessOutputRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.application.io.DataType;
import org.apache.airavata.model.application.io.OutputDataObjectType;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.experiment.ExperimentType;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class ProcessOutputRepositoryTest extends TestBase {

    private static final Logger logger = LoggerFactory.getLogger(ProcessOutputRepositoryTest.class);

    GatewayRepository gatewayRepository;
    ProjectRepository projectRepository;
    ExperimentRepository experimentRepository;
    ProcessRepository processRepository;
    ProcessOutputRepository processOutputRepository;

    public ProcessOutputRepositoryTest() {
        super(Database.EXP_CATALOG);
        gatewayRepository = new GatewayRepository();
        projectRepository = new ProjectRepository();
        experimentRepository = new ExperimentRepository();
        processRepository = new ProcessRepository();
        processOutputRepository = new ProcessOutputRepository();
    }

    /**
     * Verifies that process outputs can be added to a process, updated with a
     * value, and read back with type and value intact.
     */
    @Test
    public void ProcessOutputRepositoryTest() throws RegistryException {
        Gateway gateway = new Gateway();
        gateway.setGatewayId("gateway");
        gateway.setDomain("SEAGRID");
        gateway.setEmailAddress("abc@d.com");
        String gatewayId = gatewayRepository.addGateway(gateway);

        Project project = new Project();
        project.setName("projectName");
        project.setOwner("user");
        project.setGatewayId(gatewayId);
        String projectId = projectRepository.addProject(project, gatewayId);

        ExperimentModel experimentModel = new ExperimentModel();
        experimentModel.setProjectId(projectId);
        experimentModel.setGatewayId(gatewayId);
        experimentModel.setExperimentType(ExperimentType.SINGLE_APPLICATION);
        experimentModel.setUserName("user");
        experimentModel.setExperimentName("name");
        String experimentId = experimentRepository.addExperiment(experimentModel);

        ProcessModel processModel = new ProcessModel(null, experimentId);
        String processId = processRepository.addProcess(processModel, experimentId);
        assertTrue(processId != null);

        OutputDataObjectType outputDataObjectProType = new OutputDataObjectType();
        outputDataObjectProType.setName("outputP");
        outputDataObjectProType.setType(DataType.STDERR);
        List<OutputDataObjectType> outputDataObjectTypeProList = new ArrayList<>();
        outputDataObjectTypeProList.add(outputDataObjectProType);
        // addProcessOutputs is expected to echo back the owning process id
        assertEquals(processId, processOutputRepository.addProcessOutputs(outputDataObjectTypeProList, processId));
        // assertEquals reports the actual size on failure, unlike assertTrue(size() == 1)
        assertEquals(1, processRepository.getProcess(processId).getProcessOutputs().size());

        outputDataObjectProType.setValue("oValueP");
        processOutputRepository.updateProcessOutputs(outputDataObjectTypeProList, processId);
        List<OutputDataObjectType> retrievedProOutputList = processOutputRepository.getProcessOutputs(processId);
        assertEquals(1, retrievedProOutputList.size());
        assertEquals("oValueP", retrievedProOutputList.get(0).getValue());
        assertEquals(DataType.STDERR, retrievedProOutputList.get(0).getType());

        // NOTE(review): the experiment is removed before the process, and the
        // gateway before the project — presumably cascades make this safe;
        // confirm the cleanup ordering is intentional.
        experimentRepository.removeExperiment(experimentId);
        processRepository.removeProcess(processId);
        gatewayRepository.removeGateway(gatewayId);
        projectRepository.removeProject(projectId);
    }
}
| 803 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentErrorRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.commons.ErrorModel;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.experiment.ExperimentType;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class ExperimentErrorRepositoryTest extends TestBase {

    private static final Logger logger = LoggerFactory.getLogger(ExperimentErrorRepositoryTest.class);

    GatewayRepository gatewayRepository;
    ProjectRepository projectRepository;
    ExperimentRepository experimentRepository;
    ExperimentErrorRepository experimentErrorRepository;

    public ExperimentErrorRepositoryTest() {
        super(Database.EXP_CATALOG);
        gatewayRepository = new GatewayRepository();
        projectRepository = new ProjectRepository();
        experimentRepository = new ExperimentRepository();
        experimentErrorRepository = new ExperimentErrorRepository();
    }

    /**
     * Verifies that experiment errors can be added, updated with a message, and
     * read back.
     *
     * NOTE(review): the method name appears copy-pasted from
     * ExperimentRepositoryTest; kept unchanged, but it should arguably be
     * ExperimentErrorRepositoryTest.
     */
    @Test
    public void ExperimentRepositoryTest() throws RegistryException {
        Gateway gateway = new Gateway();
        gateway.setGatewayId("gateway");
        gateway.setDomain("SEAGRID");
        gateway.setEmailAddress("abc@d.com");
        String gatewayId = gatewayRepository.addGateway(gateway);

        Project project = new Project();
        project.setName("projectName");
        project.setOwner("user");
        project.setGatewayId(gatewayId);
        String projectId = projectRepository.addProject(project, gatewayId);

        ExperimentModel experimentModel = new ExperimentModel();
        experimentModel.setProjectId(projectId);
        experimentModel.setGatewayId(gatewayId);
        experimentModel.setExperimentType(ExperimentType.SINGLE_APPLICATION);
        experimentModel.setUserName("user");
        experimentModel.setExperimentName("name");
        String experimentId = experimentRepository.addExperiment(experimentModel);
        assertTrue(experimentId != null);

        ErrorModel errorModel = new ErrorModel();
        errorModel.setErrorId("error");
        String experimentErrorId = experimentErrorRepository.addExperimentError(errorModel, experimentId);
        assertTrue(experimentErrorId != null);
        // assertEquals reports the actual size on failure, unlike assertTrue(size() == 1)
        assertEquals(1, experimentRepository.getExperiment(experimentId).getErrors().size());

        errorModel.setActualErrorMessage("message");
        experimentErrorRepository.updateExperimentError(errorModel, experimentId);
        List<ErrorModel> retrievedErrorList = experimentErrorRepository.getExperimentErrors(experimentId);
        assertEquals(1, retrievedErrorList.size());
        assertEquals("message", retrievedErrorList.get(0).getActualErrorMessage());

        experimentRepository.removeExperiment(experimentId);
        gatewayRepository.removeGateway(gatewayId);
        projectRepository.removeProject(projectId);
    }
}
| 804 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.application.io.DataType;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.experiment.ExperimentType;
import org.apache.airavata.model.experiment.UserConfigurationDataModel;
import org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel;
import org.apache.airavata.model.status.ExperimentState;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
public class ExperimentRepositoryTest extends TestBase {
private static final Logger logger = LoggerFactory.getLogger(ExperimentRepositoryTest.class);
GatewayRepository gatewayRepository;
ProjectRepository projectRepository;
ExperimentRepository experimentRepository;
private String gatewayId;
private String projectId;
// Binds this test suite to the experiment catalog database and constructs
// the repositories under test.
public ExperimentRepositoryTest() {
super(Database.EXP_CATALOG);
gatewayRepository = new GatewayRepository();
projectRepository = new ProjectRepository();
experimentRepository = new ExperimentRepository();
}
// Creates a fresh gateway and project before each test; experiments added by
// the tests below are attached to these via gatewayId/projectId.
@Override
public void setUp() throws Exception {
super.setUp();
Gateway gateway = new Gateway();
gateway.setGatewayId("gateway");
gateway.setDomain("SEAGRID");
gateway.setEmailAddress("abc@d.com");
gatewayId = gatewayRepository.addGateway(gateway);
Project project = new Project();
project.setName("projectName");
project.setOwner("user");
project.setGatewayId(gatewayId);
projectId = projectRepository.addProject(project, gatewayId);
}
/**
 * Full add/update/get/remove pass over ExperimentRepository, including the
 * nested UserConfigurationData and ComputationalResourceScheduling records.
 */
@Test
public void ExperimentRepositoryTest() throws RegistryException {
    ExperimentModel experimentModel = new ExperimentModel();
    experimentModel.setProjectId(projectId);
    experimentModel.setGatewayId(gatewayId);
    experimentModel.setExperimentType(ExperimentType.SINGLE_APPLICATION);
    experimentModel.setUserName("user");
    experimentModel.setExperimentName("name");
    experimentModel.setGatewayInstanceId("gateway-instance-id");
    String experimentId = experimentRepository.addExperiment(experimentModel);
    // assertNotNull reports a clearer failure than assertTrue(x != null)
    assertNotNull(experimentId);
    assertEquals(0, experimentRepository.getExperiment(experimentId).getEmailAddressesSize());

    experimentModel.setDescription("description");
    experimentModel.addToEmailAddresses("notify@example.com");
    experimentModel.addToEmailAddresses("notify2@example.com");
    experimentRepository.updateExperiment(experimentModel, experimentId);

    ExperimentModel retrievedExperimentModel = experimentRepository.getExperiment(experimentId);
    assertEquals("description", retrievedExperimentModel.getDescription());
    assertEquals(ExperimentType.SINGLE_APPLICATION, retrievedExperimentModel.getExperimentType());
    assertEquals("gateway-instance-id", retrievedExperimentModel.getGatewayInstanceId());
    assertEquals(1, retrievedExperimentModel.getExperimentStatusSize());
    assertEquals(ExperimentState.CREATED, retrievedExperimentModel.getExperimentStatus().get(0).getState());
    assertEquals(2, retrievedExperimentModel.getEmailAddressesSize());
    assertEquals("notify@example.com", retrievedExperimentModel.getEmailAddresses().get(0));
    assertEquals("notify2@example.com", retrievedExperimentModel.getEmailAddresses().get(1));

    UserConfigurationDataModel userConfigurationDataModel = new UserConfigurationDataModel();
    userConfigurationDataModel.setAiravataAutoSchedule(true);
    userConfigurationDataModel.setOverrideManualScheduledParams(false);
    ComputationalResourceSchedulingModel computationalResourceSchedulingModel = new ComputationalResourceSchedulingModel();
    computationalResourceSchedulingModel.setResourceHostId("resource-host-id");
    computationalResourceSchedulingModel.setTotalCPUCount(12);
    computationalResourceSchedulingModel.setNodeCount(13);
    computationalResourceSchedulingModel.setNumberOfThreads(14);
    computationalResourceSchedulingModel.setOverrideAllocationProjectNumber("override-project-num");
    computationalResourceSchedulingModel.setOverrideLoginUserName("override-login-username");
    computationalResourceSchedulingModel.setOverrideScratchLocation("override-scratch-location");
    computationalResourceSchedulingModel.setQueueName("queue-name");
    computationalResourceSchedulingModel.setStaticWorkingDir("static-working-dir");
    computationalResourceSchedulingModel.setTotalPhysicalMemory(1333);
    computationalResourceSchedulingModel.setWallTimeLimit(77);
    userConfigurationDataModel.setComputationalResourceScheduling(computationalResourceSchedulingModel);
    // addUserConfigurationData is expected to echo back the owning experiment id
    assertEquals(experimentId, experimentRepository.addUserConfigurationData(userConfigurationDataModel, experimentId));

    userConfigurationDataModel.setStorageId("storage2");
    experimentRepository.updateUserConfigurationData(userConfigurationDataModel, experimentId);

    final UserConfigurationDataModel retrievedUserConfigurationDataModel = experimentRepository.getUserConfigurationData(experimentId);
    assertEquals("storage2", retrievedUserConfigurationDataModel.getStorageId());
    final ComputationalResourceSchedulingModel retrievedComputationalResourceScheduling = retrievedUserConfigurationDataModel.getComputationalResourceScheduling();
    assertNotNull(retrievedComputationalResourceScheduling);
    assertEquals("resource-host-id", retrievedComputationalResourceScheduling.getResourceHostId());
    assertEquals(12, retrievedComputationalResourceScheduling.getTotalCPUCount());
    assertEquals(13, retrievedComputationalResourceScheduling.getNodeCount());
    assertEquals(14, retrievedComputationalResourceScheduling.getNumberOfThreads());
    assertEquals("override-project-num", retrievedComputationalResourceScheduling.getOverrideAllocationProjectNumber());
    assertEquals("override-login-username", retrievedComputationalResourceScheduling.getOverrideLoginUserName());
    assertEquals("override-scratch-location", retrievedComputationalResourceScheduling.getOverrideScratchLocation());
    assertEquals("queue-name", retrievedComputationalResourceScheduling.getQueueName());
    assertEquals("static-working-dir", retrievedComputationalResourceScheduling.getStaticWorkingDir());
    assertEquals(1333, retrievedComputationalResourceScheduling.getTotalPhysicalMemory());
    assertEquals(77, retrievedComputationalResourceScheduling.getWallTimeLimit());

    experimentRepository.removeExperiment(experimentId);
    assertFalse(experimentRepository.isExperimentExist(experimentId));
}
/**
 * Verifies that every field of an experiment input survives a create/read
 * round trip, and that updates to every field are persisted.
 */
@Test
public void testExperimentInputs() throws RegistryException {
    ExperimentModel experimentModel = new ExperimentModel();
    experimentModel.setProjectId(projectId);
    experimentModel.setGatewayId(gatewayId);
    experimentModel.setExperimentType(ExperimentType.SINGLE_APPLICATION);
    experimentModel.setUserName("user");
    experimentModel.setExperimentName("name");
    experimentModel.setGatewayInstanceId("gateway-instance-id");

    InputDataObjectType input1 = new InputDataObjectType();
    input1.setName("name1");
    input1.setIsRequired(true);
    input1.setType(DataType.STRING);
    input1.setInputOrder(0);
    input1.setApplicationArgument("-arg1");
    input1.setDataStaged(true);
    input1.setIsReadOnly(true);
    input1.setMetaData("{\"foo\": 123}");
    input1.setRequiredToAddedToCommandLine(true);
    input1.setStandardInput(true);
    input1.setStorageResourceId("storageResourceId");
    input1.setUserFriendlyDescription("First argument");
    input1.setValue("value1");
    input1.setOverrideFilename("gaussian.com");
    experimentModel.addToExperimentInputs(input1);
    String experimentId = experimentRepository.addExperiment(experimentModel);
    // assertNotNull reports a clearer failure than assertTrue(x != null)
    assertNotNull(experimentId);

    ExperimentModel retrievedExperimentModel = experimentRepository.getExperiment(experimentId);
    assertEquals(1, retrievedExperimentModel.getExperimentInputsSize());
    InputDataObjectType retrievedInput1 = retrievedExperimentModel.getExperimentInputs().get(0);
    assertEquals("name1", retrievedInput1.getName());
    assertTrue(retrievedInput1.isIsRequired());
    assertEquals(DataType.STRING, retrievedInput1.getType());
    assertEquals(0, retrievedInput1.getInputOrder());
    assertEquals("-arg1", retrievedInput1.getApplicationArgument());
    assertTrue(retrievedInput1.isDataStaged());
    assertTrue(retrievedInput1.isIsReadOnly());
    assertEquals("{\"foo\": 123}", retrievedInput1.getMetaData());
    assertTrue(retrievedInput1.isRequiredToAddedToCommandLine());
    assertTrue(retrievedInput1.isStandardInput());
    assertEquals("storageResourceId", retrievedInput1.getStorageResourceId());
    assertEquals("First argument", retrievedInput1.getUserFriendlyDescription());
    assertEquals("value1", retrievedInput1.getValue());
    assertEquals("gaussian.com", retrievedInput1.getOverrideFilename());

    // Update values of the input
    retrievedInput1.setIsRequired(false);
    retrievedInput1.setType(DataType.URI);
    retrievedInput1.setInputOrder(1);
    retrievedInput1.setApplicationArgument("-arg1a");
    retrievedInput1.setDataStaged(false);
    retrievedInput1.setIsReadOnly(false);
    retrievedInput1.setMetaData("{\"bar\": 456}");
    retrievedInput1.setRequiredToAddedToCommandLine(false);
    retrievedInput1.setStandardInput(false);
    retrievedInput1.setStorageResourceId("storageResourceId2");
    retrievedInput1.setUserFriendlyDescription("First argument~");
    retrievedInput1.setValue("value1a");
    retrievedInput1.setOverrideFilename("gaussian.com-updated");
    experimentRepository.updateExperiment(retrievedExperimentModel, experimentId);

    retrievedExperimentModel = experimentRepository.getExperiment(experimentId);
    assertEquals(1, retrievedExperimentModel.getExperimentInputsSize());
    retrievedInput1 = retrievedExperimentModel.getExperimentInputs().get(0);
    assertFalse(retrievedInput1.isIsRequired());
    assertEquals(DataType.URI, retrievedInput1.getType());
    assertEquals(1, retrievedInput1.getInputOrder());
    assertEquals("-arg1a", retrievedInput1.getApplicationArgument());
    assertFalse(retrievedInput1.isDataStaged());
    assertFalse(retrievedInput1.isIsReadOnly());
    assertEquals("{\"bar\": 456}", retrievedInput1.getMetaData());
    assertFalse(retrievedInput1.isRequiredToAddedToCommandLine());
    assertFalse(retrievedInput1.isStandardInput());
    assertEquals("storageResourceId2", retrievedInput1.getStorageResourceId());
    assertEquals("First argument~", retrievedInput1.getUserFriendlyDescription());
    assertEquals("value1a", retrievedInput1.getValue());
    assertEquals("gaussian.com-updated", retrievedInput1.getOverrideFilename());

    experimentRepository.removeExperiment(experimentId);
    assertFalse(experimentRepository.isExperimentExist(experimentId));
}
/**
 * Verify that slashes (forward and backward) in an experiment name are
 * replaced with underscores when the experiment id is generated.
 */
@Test
public void testSlashesInExperimentName() throws RegistryException {
    // Case 1: forward slashes ("/" and "//") become "_" and "__"
    ExperimentModel forwardSlashExperiment = new ExperimentModel();
    forwardSlashExperiment.setProjectId(projectId);
    forwardSlashExperiment.setGatewayId(gatewayId);
    forwardSlashExperiment.setExperimentType(ExperimentType.SINGLE_APPLICATION);
    forwardSlashExperiment.setUserName("user");
    forwardSlashExperiment.setExperimentName("name/forward-slash//a");
    String forwardSlashExperimentId = experimentRepository.addExperiment(forwardSlashExperiment);
    assertTrue(forwardSlashExperimentId.startsWith("name_forward-slash__a"));

    // Case 2: backslashes ("\" and "\\") become "_" and "__"
    ExperimentModel backSlashExperiment = new ExperimentModel();
    backSlashExperiment.setProjectId(projectId);
    backSlashExperiment.setGatewayId(gatewayId);
    backSlashExperiment.setExperimentType(ExperimentType.SINGLE_APPLICATION);
    backSlashExperiment.setUserName("user");
    backSlashExperiment.setExperimentName("name\\backward-slash\\\\a");
    String backSlashExperimentId = experimentRepository.addExperiment(backSlashExperiment);
    assertTrue(backSlashExperimentId.startsWith("name_backward-slash__a"));
}
}
| 805 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ProcessInputRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.application.io.DataType;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.experiment.ExperimentType;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Integration test for {@link ProcessInputRepository}. Builds the minimal
 * gateway -> project -> experiment -> process hierarchy, then exercises the
 * add, read-back and update paths for process inputs.
 */
public class ProcessInputRepositoryTest extends TestBase {
    private static final Logger logger = LoggerFactory.getLogger(ProcessInputRepositoryTest.class);

    GatewayRepository gatewayRepository;
    ProjectRepository projectRepository;
    ExperimentRepository experimentRepository;
    ProcessRepository processRepository;
    ProcessInputRepository processInputRepository;

    public ProcessInputRepositoryTest() {
        // Only the experiment catalog schema is needed by this test.
        super(Database.EXP_CATALOG);
        gatewayRepository = new GatewayRepository();
        projectRepository = new ProjectRepository();
        experimentRepository = new ExperimentRepository();
        processRepository = new ProcessRepository();
        processInputRepository = new ProcessInputRepository();
    }

    // NOTE(review): the test method keeps its historical class-like name so any
    // name-based test filters continue to match; @Test drives discovery.
    @Test
    public void ProcessInputRepositoryTest() throws RegistryException {
        // --- fixture: gateway -> project -> experiment -> process ---
        Gateway gateway = new Gateway();
        gateway.setGatewayId("gateway");
        gateway.setDomain("SEAGRID");
        gateway.setEmailAddress("abc@d.com");
        String gatewayId = gatewayRepository.addGateway(gateway);

        Project project = new Project();
        project.setName("projectName");
        project.setOwner("user");
        project.setGatewayId(gatewayId);
        String projectId = projectRepository.addProject(project, gatewayId);

        ExperimentModel experimentModel = new ExperimentModel();
        experimentModel.setProjectId(projectId);
        experimentModel.setGatewayId(gatewayId);
        experimentModel.setExperimentType(ExperimentType.SINGLE_APPLICATION);
        experimentModel.setUserName("user");
        experimentModel.setExperimentName("name");
        String experimentId = experimentRepository.addExperiment(experimentModel);

        ProcessModel processModel = new ProcessModel(null, experimentId);
        String processId = processRepository.addProcess(processModel, experimentId);
        assertTrue("process should have been persisted", processId != null);

        // --- add a single input and verify it is attached to the process ---
        InputDataObjectType inputDataObjectProType = new InputDataObjectType();
        inputDataObjectProType.setName("inputP");
        inputDataObjectProType.setType(DataType.STDOUT);
        List<InputDataObjectType> inputDataObjectTypeProList = new ArrayList<>();
        inputDataObjectTypeProList.add(inputDataObjectProType);
        assertEquals(processId, processInputRepository.addProcessInputs(inputDataObjectTypeProList, processId));
        assertEquals(1, processRepository.getProcess(processId).getProcessInputs().size());

        // --- update the input's value and verify it round-trips ---
        inputDataObjectProType.setValue("iValueP");
        processInputRepository.updateProcessInputs(inputDataObjectTypeProList, processId);
        List<InputDataObjectType> retrievedProInputsList = processInputRepository.getProcessInputs(processId);
        assertEquals(1, retrievedProInputsList.size());
        assertEquals("iValueP", retrievedProInputsList.get(0).getValue());
        assertEquals(DataType.STDOUT, retrievedProInputsList.get(0).getType());

        // --- cleanup ---
        experimentRepository.removeExperiment(experimentId);
        processRepository.removeProcess(processId);
        gatewayRepository.removeGateway(gatewayId);
        projectRepository.removeProject(projectId);
    }
}
| 806 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/TaskRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.experiment.ExperimentType;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.model.status.TaskState;
import org.apache.airavata.model.status.TaskStatus;
import org.apache.airavata.model.task.TaskModel;
import org.apache.airavata.model.task.TaskTypes;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.charset.StandardCharsets;
import java.util.List;
import static org.junit.Assert.*;
/**
 * Integration test for {@link TaskRepository}. Verifies that a task (with an
 * initial status and a serialized sub-task model) can be added under a
 * process, updated, queried by parent process id, and removed.
 */
public class TaskRepositoryTest extends TestBase {
    private static final Logger logger = LoggerFactory.getLogger(TaskRepositoryTest.class);

    GatewayRepository gatewayRepository;
    ProjectRepository projectRepository;
    ExperimentRepository experimentRepository;
    ProcessRepository processRepository;
    TaskRepository taskRepository;

    public TaskRepositoryTest() {
        // Only the experiment catalog schema is needed by this test.
        super(Database.EXP_CATALOG);
        gatewayRepository = new GatewayRepository();
        projectRepository = new ProjectRepository();
        experimentRepository = new ExperimentRepository();
        processRepository = new ProcessRepository();
        taskRepository = new TaskRepository();
    }

    // NOTE(review): the test method keeps its historical class-like name so any
    // name-based test filters continue to match; @Test drives discovery.
    @Test
    public void TaskRepositoryTest() throws RegistryException {
        // --- fixture: gateway -> project -> experiment -> process ---
        Gateway gateway = new Gateway();
        gateway.setGatewayId("gateway");
        gateway.setDomain("SEAGRID");
        gateway.setEmailAddress("abc@d.com");
        String gatewayId = gatewayRepository.addGateway(gateway);

        Project project = new Project();
        project.setName("projectName");
        project.setOwner("user");
        project.setGatewayId(gatewayId);
        String projectId = projectRepository.addProject(project, gatewayId);

        ExperimentModel experimentModel = new ExperimentModel();
        experimentModel.setProjectId(projectId);
        experimentModel.setGatewayId(gatewayId);
        experimentModel.setExperimentType(ExperimentType.SINGLE_APPLICATION);
        experimentModel.setUserName("user");
        experimentModel.setExperimentName("name");
        String experimentId = experimentRepository.addExperiment(experimentModel);

        ProcessModel processModel = new ProcessModel(null, experimentId);
        String processId = processRepository.addProcess(processModel, experimentId);

        // --- create a task with an initial CREATED status ---
        TaskModel taskModel = new TaskModel();
        taskModel.setTaskType(TaskTypes.JOB_SUBMISSION);
        taskModel.setParentProcessId(processId);
        // Sub-task model is stored as an opaque byte blob; use UTF-8 explicitly.
        taskModel.setSubTaskModel("subtask model".getBytes(StandardCharsets.UTF_8));
        TaskStatus taskStatus = new TaskStatus(TaskState.CREATED);
        taskStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
        taskModel.addToTaskStatuses(taskStatus);
        String taskId = taskRepository.addTask(taskModel, processId);
        assertNotNull(taskId);
        assertEquals(1, processRepository.getProcess(processId).getTasks().size());

        // --- update the task type and verify everything round-trips ---
        taskModel.setTaskType(TaskTypes.MONITORING);
        taskRepository.updateTask(taskModel, taskId);
        TaskModel retrievedTask = taskRepository.getTask(taskId);
        assertEquals(TaskTypes.MONITORING, retrievedTask.getTaskType());
        assertArrayEquals("subtask model".getBytes(StandardCharsets.UTF_8), retrievedTask.getSubTaskModel());
        assertEquals(1, retrievedTask.getTaskStatusesSize());
        assertEquals(TaskState.CREATED, retrievedTask.getTaskStatuses().get(0).getState());

        // --- look the task up by its parent process id ---
        List<String> taskIdList = taskRepository.getTaskIds(DBConstants.Task.PARENT_PROCESS_ID, processId);
        assertEquals(1, taskIdList.size());
        assertEquals(taskId, taskIdList.get(0));

        // --- cleanup ---
        experimentRepository.removeExperiment(experimentId);
        processRepository.removeProcess(processId);
        taskRepository.removeTask(taskId);
        assertFalse(taskRepository.isTaskExist(taskId));
        gatewayRepository.removeGateway(gatewayId);
        projectRepository.removeProject(projectId);
    }
}
| 807 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/JobRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.experiment.ExperimentType;
import org.apache.airavata.model.job.JobModel;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.model.status.JobState;
import org.apache.airavata.model.status.JobStatus;
import org.apache.airavata.model.task.TaskModel;
import org.apache.airavata.model.task.TaskTypes;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.entities.expcatalog.JobPK;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
/**
 * Integration test for {@link JobRepository}. Verifies that a job (keyed by
 * the composite {@link JobPK} of job id + task id) can be added under a
 * process, updated, queried by task id, and removed.
 */
public class JobRepositoryTest extends TestBase {
    private static final Logger logger = LoggerFactory.getLogger(JobRepositoryTest.class);

    GatewayRepository gatewayRepository;
    ProjectRepository projectRepository;
    ExperimentRepository experimentRepository;
    ProcessRepository processRepository;
    TaskRepository taskRepository;
    JobRepository jobRepository;

    public JobRepositoryTest() {
        // Only the experiment catalog schema is needed by this test.
        super(Database.EXP_CATALOG);
        gatewayRepository = new GatewayRepository();
        projectRepository = new ProjectRepository();
        experimentRepository = new ExperimentRepository();
        processRepository = new ProcessRepository();
        taskRepository = new TaskRepository();
        jobRepository = new JobRepository();
    }

    // NOTE(review): the test method keeps its historical class-like name so any
    // name-based test filters continue to match; @Test drives discovery.
    @Test
    public void JobRepositoryTest() throws RegistryException {
        // --- fixture: gateway -> project -> experiment -> process -> task ---
        Gateway gateway = new Gateway();
        gateway.setGatewayId("gateway");
        gateway.setDomain("SEAGRID");
        gateway.setEmailAddress("abc@d.com");
        String gatewayId = gatewayRepository.addGateway(gateway);

        Project project = new Project();
        project.setName("projectName");
        project.setOwner("user");
        project.setGatewayId(gatewayId);
        String projectId = projectRepository.addProject(project, gatewayId);

        ExperimentModel experimentModel = new ExperimentModel();
        experimentModel.setProjectId(projectId);
        experimentModel.setGatewayId(gatewayId);
        experimentModel.setExperimentType(ExperimentType.SINGLE_APPLICATION);
        experimentModel.setUserName("user");
        experimentModel.setExperimentName("name");
        String experimentId = experimentRepository.addExperiment(experimentModel);

        ProcessModel processModel = new ProcessModel(null, experimentId);
        String processId = processRepository.addProcess(processModel, experimentId);

        TaskModel taskModel = new TaskModel();
        taskModel.setTaskType(TaskTypes.JOB_SUBMISSION);
        taskModel.setParentProcessId(processId);
        String taskId = taskRepository.addTask(taskModel, processId);
        assertTrue("task should have been persisted", taskId != null);
        taskModel.setTaskType(TaskTypes.MONITORING);
        taskRepository.updateTask(taskModel, taskId);

        // --- create a job with an initial SUBMITTED status ---
        JobModel jobModel = new JobModel();
        jobModel.setJobId("job");
        jobModel.setTaskId(taskId);
        jobModel.setJobDescription("jobDescription");
        JobStatus jobStatus = new JobStatus(JobState.SUBMITTED);
        jobModel.addToJobStatuses(jobStatus);
        String jobId = jobRepository.addJob(jobModel, processId);
        assertTrue("job should have been persisted", jobId != null);
        assertEquals(1, taskRepository.getTask(taskId).getJobs().size());

        // Jobs are addressed by the composite (jobId, taskId) primary key.
        JobPK jobPK = new JobPK();
        jobPK.setJobId(jobId);
        jobPK.setTaskId(taskId);

        // --- update the job name and verify everything round-trips ---
        jobModel.setJobName("jobName");
        jobRepository.updateJob(jobModel, jobPK);
        final JobModel retrievedJob = jobRepository.getJob(jobPK);
        assertEquals("jobName", retrievedJob.getJobName());
        assertEquals(1, retrievedJob.getJobStatusesSize());
        assertEquals(JobState.SUBMITTED, retrievedJob.getJobStatuses().get(0).getJobState());

        // --- look the job up by its owning task id ---
        List<String> jobIdList = jobRepository.getJobIds(DBConstants.Job.TASK_ID, taskId);
        assertEquals(1, jobIdList.size());
        assertEquals(jobId, jobIdList.get(0));

        // --- cleanup ---
        experimentRepository.removeExperiment(experimentId);
        processRepository.removeProcess(processId);
        taskRepository.removeTask(taskId);
        jobRepository.removeJob(jobPK);
        assertFalse(jobRepository.isJobExist(jobPK));
        gatewayRepository.removeGateway(gatewayId);
        projectRepository.removeProject(projectId);
    }
}
| 808 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/NotificationRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.workspace.Notification;
import org.apache.airavata.model.workspace.NotificationPriority;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Integration test for {@link NotificationRepository}. Verifies that a gateway
 * notification can be created, updated, listed per gateway, and deleted.
 */
public class NotificationRepositoryTest extends TestBase {
    private static final Logger logger = LoggerFactory.getLogger(NotificationRepositoryTest.class);

    // Gateway id used to scope all notifications created by this test.
    private String testGateway = "testGateway";

    NotificationRepository notificationRepository;

    public NotificationRepositoryTest() {
        // Only the experiment catalog schema is needed by this test.
        super(Database.EXP_CATALOG);
        notificationRepository = new NotificationRepository();
    }

    // NOTE(review): the test method keeps its historical class-like name so any
    // name-based test filters continue to match; @Test drives discovery.
    @Test
    public void NotificationRepositoryTest() throws RegistryException {
        // --- create a notification with an explicit id ---
        Notification notification = new Notification();
        notification.setNotificationId("notificationId");
        notification.setGatewayId(testGateway);
        notification.setTitle("notificationTitle");
        notification.setNotificationMessage("notificationMessage");
        String notificationId = notificationRepository.createNotification(notification);
        // The repository must honor the caller-supplied id.
        assertEquals(notification.getNotificationId(), notificationId);

        // --- update the priority and verify the change round-trips ---
        notification.setPriority(NotificationPriority.NORMAL);
        notificationRepository.updateNotification(notification);
        Notification retrievedNotification = notificationRepository.getNotification(notificationId);
        assertEquals(NotificationPriority.NORMAL, retrievedNotification.getPriority());

        // --- the gateway listing should contain exactly this notification ---
        assertEquals(1, notificationRepository.getAllGatewayNotifications(testGateway).size());

        // --- cleanup ---
        notificationRepository.deleteNotification(notificationId);
    }
}
| 809 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/expcatalog/ProcessErrorRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.commons.ErrorModel;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.experiment.ExperimentType;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.RegistryException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class ProcessErrorRepositoryTest extends TestBase {
private static final Logger logger = LoggerFactory.getLogger(ProcessErrorRepositoryTest.class);
GatewayRepository gatewayRepository;
ProjectRepository projectRepository;
ExperimentRepository experimentRepository;
ProcessRepository processRepository;
ProcessErrorRepository processErrorRepository;
public ProcessErrorRepositoryTest() {
super(Database.EXP_CATALOG);
gatewayRepository = new GatewayRepository();
projectRepository = new ProjectRepository();
experimentRepository = new ExperimentRepository();
processRepository = new ProcessRepository();
processErrorRepository = new ProcessErrorRepository();
}
@Test
public void ProcessErrorRepositoryTest() throws RegistryException {
Gateway gateway = new Gateway();
gateway.setGatewayId("gateway");
gateway.setDomain("SEAGRID");
gateway.setEmailAddress("abc@d.com");
String gatewayId = gatewayRepository.addGateway(gateway);
Project project = new Project();
project.setName("projectName");
project.setOwner("user");
project.setGatewayId(gatewayId);
String projectId = projectRepository.addProject(project, gatewayId);
ExperimentModel experimentModel = new ExperimentModel();
experimentModel.setProjectId(projectId);
experimentModel.setGatewayId(gatewayId);
experimentModel.setExperimentType(ExperimentType.SINGLE_APPLICATION);
experimentModel.setUserName("user");
experimentModel.setExperimentName("name");
String experimentId = experimentRepository.addExperiment(experimentModel);
ProcessModel processModel = new ProcessModel(null, experimentId);
String processId = processRepository.addProcess(processModel, experimentId);
assertTrue(processId != null);
ErrorModel errorModel = new ErrorModel();
errorModel.setErrorId("error");
String processErrorId = processErrorRepository.addProcessError(errorModel, processId);
assertTrue(processErrorId != null);
assertTrue(processRepository.getProcess(processId).getProcessErrors().size() == 1);
errorModel.setActualErrorMessage("message");
processErrorRepository.updateProcessError(errorModel, processId);
List<ErrorModel> retrievedErrorList = processErrorRepository.getProcessError(processId);
assertTrue(retrievedErrorList.size() == 1);
assertEquals("message", retrievedErrorList.get(0).getActualErrorMessage());
experimentRepository.removeExperiment(experimentId);
processRepository.removeProcess(processId);
gatewayRepository.removeGateway(gatewayId);
projectRepository.removeProject(projectId);
}
}
| 810 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/common/TestBase.java | package org.apache.airavata.registry.core.repositories.common;
import org.apache.airavata.common.utils.DBInitConfig;
import org.apache.airavata.common.utils.DBInitializer;
import org.apache.airavata.common.utils.DerbyTestUtil;
import org.apache.airavata.common.utils.DerbyUtil;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.registry.core.utils.AppCatalogDBInitConfig;
import org.apache.airavata.registry.core.utils.ExpCatalogDBInitConfig;
import org.apache.airavata.registry.core.utils.ReplicaCatalogDBInitConfig;
import org.apache.airavata.registry.core.utils.WorkflowCatalogDBInitConfig;
import org.junit.After;
import org.junit.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for registry repository tests. Starts an embedded Derby server,
 * (re)creates the requested catalog databases before each test, and destroys
 * them afterwards.
 */
public class TestBase {
    private static final Logger logger = LoggerFactory.getLogger(TestBase.class);

    /** The catalog databases a test can request in its constructor. */
    public enum Database {APP_CATALOG, EXP_CATALOG, REPLICA_CATALOG, WORKFLOW_CATALOG}

    private Database[] databases;

    /**
     * @param databases the catalogs to create for each test; must not be null
     */
    public TestBase(Database... databases) {
        if (databases == null) {
            throw new IllegalArgumentException("Databases can not be null");
        }
        this.databases = databases;
    }

    /**
     * Starts Derby in server mode and initializes each requested catalog from
     * scratch (any leftover database from a previous run is destroyed first).
     */
    @Before
    public void setUp() throws Exception {
        try {
            DerbyUtil.startDerbyInServerMode("127.0.0.1", 20000, "airavata", "airavata");
            for (Database database : databases) {
                logger.info("Creating database {}", database.name());
                // Drop any stale database first so each test starts clean.
                DerbyTestUtil.destroyDatabase(getDatabaseJDBCConfig(database));
                DBInitializer.initializeDB(getDBInitConfig(database));
            }
        } catch (Exception e) {
            logger.error("Failed to create the databases", e);
            throw e;
        }
    }

    /** Destroys every catalog created in {@link #setUp()} and stops Derby. */
    @After
    public void tearDown() throws Exception {
        for (Database database : databases) {
            // Use the logger (not System.out) for consistency with setUp().
            logger.info("Tearing down database {}", database.name());
            DerbyTestUtil.destroyDatabase(getDatabaseJDBCConfig(database));
        }
        DerbyUtil.stopDerbyServer();
    }

    /** JDBC connection settings for the given catalog. */
    private JDBCConfig getDatabaseJDBCConfig(Database database) {
        return getDBInitConfig(database).getJDBCConfig();
    }

    /** Init config (schema scripts + JDBC settings) for the given catalog. */
    private DBInitConfig getDBInitConfig(Database database) {
        switch (database) {
            case APP_CATALOG:
                return new AppCatalogDBInitConfig()
                        .setDbInitScriptPrefix("appcatalog");
            case EXP_CATALOG:
                return new ExpCatalogDBInitConfig()
                        .setDbInitScriptPrefix("expcatalog");
            case REPLICA_CATALOG:
                return new ReplicaCatalogDBInitConfig()
                        .setDbInitScriptPrefix("replicacatalog");
            case WORKFLOW_CATALOG:
                return new WorkflowCatalogDBInitConfig()
                        .setDbInitScriptPrefix("airavataworkflowcatalog");
            default:
                return null;
        }
    }
}
| 811 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/replicacatalog/DataReplicaLocationRepositoryTest.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.replicacatalog;
import org.apache.airavata.model.data.replica.DataProductModel;
import org.apache.airavata.model.data.replica.DataProductType;
import org.apache.airavata.model.data.replica.DataReplicaLocationModel;
import org.apache.airavata.model.data.replica.ReplicaPersistentType;
import org.apache.airavata.registry.core.entities.replicacatalog.DataReplicaMetadataEntity;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.ReplicaCatalogException;
import org.junit.Test;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Integration test for {@link DataReplicaLocationRepository}. Registers a data
 * product, attaches two replica locations, updates one with metadata and a
 * persistent type, and verifies the round-trips.
 */
public class DataReplicaLocationRepositoryTest extends TestBase {

    private DataProductRepository dataProductRepository;
    private DataReplicaLocationRepository dataReplicaLocationRepository;
    private String gatewayId = "testGateway";

    public DataReplicaLocationRepositoryTest() {
        // Only the replica catalog schema is needed by this test.
        super(Database.REPLICA_CATALOG);
        dataProductRepository = new DataProductRepository();
        dataReplicaLocationRepository = new DataReplicaLocationRepository();
    }

    @Test
    public void dataReplicaLocationRepositoryTest() throws ReplicaCatalogException {
        // --- fixture: a data product to hang replica locations off ---
        DataProductModel testDataProductModel = new DataProductModel();
        testDataProductModel.setGatewayId(gatewayId);
        testDataProductModel.setOwnerName("testUser");
        testDataProductModel.setDataProductType(DataProductType.COLLECTION);
        testDataProductModel.setProductName("productName");
        String productUri = dataProductRepository.registerDataProduct(testDataProductModel);
        assertTrue(dataProductRepository.isDataProductExists(productUri));

        // --- register two replica locations for the product ---
        DataReplicaLocationModel testDataReplicaLocationModel1 = new DataReplicaLocationModel();
        testDataReplicaLocationModel1.setReplicaName("replicaName1");
        testDataReplicaLocationModel1.setProductUri(productUri);
        String replicaId1 = dataReplicaLocationRepository.registerReplicaLocation(testDataReplicaLocationModel1);

        DataReplicaLocationModel testDataReplicaLocationModel2 = new DataReplicaLocationModel();
        testDataReplicaLocationModel2.setReplicaName("replicaName2");
        testDataReplicaLocationModel2.setProductUri(productUri);
        String replicaId2 = dataReplicaLocationRepository.registerReplicaLocation(testDataReplicaLocationModel2);

        // --- attach two metadata entries plus a persistent type to replica 1 ---
        DataReplicaMetadataEntity dataReplicaMetadataEntity1 = new DataReplicaMetadataEntity();
        dataReplicaMetadataEntity1.setReplicaId(replicaId1);
        dataReplicaMetadataEntity1.setMetadataKey("dataKey1");
        dataReplicaMetadataEntity1.setMetadataValue("dataValue1");

        DataReplicaMetadataEntity dataReplicaMetadataEntity2 = new DataReplicaMetadataEntity();
        dataReplicaMetadataEntity2.setReplicaId(replicaId1);
        dataReplicaMetadataEntity2.setMetadataKey("dataKey2");
        dataReplicaMetadataEntity2.setMetadataValue("dataValue2");

        Map<String, String> dataReplicaMetadataEntityMap = new HashMap<>();
        dataReplicaMetadataEntityMap.put(dataReplicaMetadataEntity1.getMetadataKey(), dataReplicaMetadataEntity1.getMetadataValue());
        dataReplicaMetadataEntityMap.put(dataReplicaMetadataEntity2.getMetadataKey(), dataReplicaMetadataEntity2.getMetadataValue());
        testDataReplicaLocationModel1.setReplicaMetadata(dataReplicaMetadataEntityMap);
        testDataReplicaLocationModel1.setReplicaPersistentType(ReplicaPersistentType.TRANSIENT);
        assertTrue(dataReplicaLocationRepository.updateReplicaLocation(testDataReplicaLocationModel1));

        // --- verify the update round-trips ---
        DataReplicaLocationModel retrievedDataReplicaLocationModel = dataReplicaLocationRepository.getReplicaLocation(replicaId1);
        assertEquals(2, retrievedDataReplicaLocationModel.getReplicaMetadata().size());
        // expected value first per the JUnit assertEquals(expected, actual) contract
        assertEquals(testDataReplicaLocationModel1.getReplicaPersistentType(), retrievedDataReplicaLocationModel.getReplicaPersistentType());
        // validUntilTime has a default value
        assertEquals(0, retrievedDataReplicaLocationModel.getValidUntilTime());

        // --- link both replicas to the product and verify the listing ---
        testDataProductModel.setReplicaLocations(Arrays.asList(testDataReplicaLocationModel1, testDataReplicaLocationModel2));
        dataProductRepository.updateDataProduct(testDataProductModel);
        assertEquals(2, dataProductRepository.getDataProduct(productUri).getReplicaLocations().size());

        List<DataReplicaLocationModel> dataReplicaLocationModelList = dataReplicaLocationRepository.getAllReplicaLocations(productUri);
        assertEquals(2, dataReplicaLocationModelList.size());

        // --- cleanup ---
        dataReplicaLocationRepository.removeReplicaLocation(replicaId1);
        dataReplicaLocationRepository.removeReplicaLocation(replicaId2);
        dataProductRepository.removeDataProduct(productUri);
    }
}
| 812 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/replicacatalog/DataProductRepositoryTest.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.replicacatalog;
import org.apache.airavata.model.data.replica.DataProductModel;
import org.apache.airavata.model.data.replica.DataProductType;
import org.apache.airavata.model.data.replica.DataReplicaLocationModel;
import org.apache.airavata.model.data.replica.ReplicaLocationCategory;
import org.apache.airavata.model.data.replica.ReplicaPersistentType;
import org.apache.airavata.registry.core.entities.replicacatalog.DataProductMetadataEntity;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.ReplicaCatalogException;
import org.junit.Before;
import org.junit.Test;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class DataProductRepositoryTest extends TestBase {
private DataProductRepository dataProductRepository;
private String gatewayId = "testGateway";
private String userId = "testUser";
private String productName = "testProduct";
public DataProductRepositoryTest() {
super(Database.REPLICA_CATALOG);
}
public void setUp() throws Exception {
super.setUp();
dataProductRepository = new DataProductRepository();
}
@Test
public void dataProductRepositoryTest() throws ReplicaCatalogException {
DataProductModel testDataProductModel1 = new DataProductModel();
testDataProductModel1.setGatewayId(gatewayId);
testDataProductModel1.setOwnerName(userId);
testDataProductModel1.setDataProductType(DataProductType.COLLECTION);
testDataProductModel1.setProductName(productName);
String productUri1 = dataProductRepository.registerDataProduct(testDataProductModel1);
assertTrue(dataProductRepository.isDataProductExists(productUri1));
DataProductModel retrievedDataProductModel1 = dataProductRepository.getDataProduct(productUri1);
assertEquals(retrievedDataProductModel1.getProductUri(), productUri1);
DataProductModel testDataProductModel2 = new DataProductModel();
testDataProductModel2.setGatewayId(gatewayId);
testDataProductModel2.setOwnerName(userId);
testDataProductModel2.setDataProductType(DataProductType.FILE);
testDataProductModel2.setProductName(productName);
String productUri2 = dataProductRepository.registerDataProduct(testDataProductModel2);
assertTrue(dataProductRepository.isDataProductExists(productUri2));
DataProductMetadataEntity dataProductMetadataEntity = new DataProductMetadataEntity();
dataProductMetadataEntity.setProductUri(productUri2);
dataProductMetadataEntity.setMetadataKey("dataKey");
dataProductMetadataEntity.setMetadataValue("dataValue");
Map<String, String> dataProductMetadataEntityMap = new HashMap<>();
dataProductMetadataEntityMap.put(dataProductMetadataEntity.getMetadataKey(), dataProductMetadataEntity.getMetadataValue());
testDataProductModel2.setProductMetadata(dataProductMetadataEntityMap);
testDataProductModel2.setParentProductUri(productUri1);
assertTrue(dataProductRepository.updateDataProduct(testDataProductModel2));
DataProductModel retrievedDataProductModel2 = dataProductRepository.getDataProduct(productUri2);
assertTrue(retrievedDataProductModel2.getProductMetadata().size() == 1);
DataProductModel retrievedParentDataProductModel = dataProductRepository.getParentDataProduct(productUri2);
assertEquals(retrievedParentDataProductModel.getProductUri(), productUri1);
List<DataProductModel> childDataProductList = dataProductRepository.getChildDataProducts(productUri1);
assertTrue(childDataProductList.size() == 1);
List<DataProductModel> dataProductModelList = dataProductRepository.searchDataProductsByName(gatewayId, userId, productName, -1, 0);
assertTrue(dataProductModelList.size() == 2);
dataProductRepository.removeDataProduct(productUri1);
assertFalse(dataProductRepository.isDataProductExists(productUri1));
dataProductRepository.removeDataProduct(productUri2);
}
@Test
public void testDataProductWithReplicaLocation() throws ReplicaCatalogException {
DataProductModel testDataProductModel1 = new DataProductModel();
testDataProductModel1.setGatewayId(gatewayId);
testDataProductModel1.setOwnerName(userId);
testDataProductModel1.setDataProductType(DataProductType.FILE);
testDataProductModel1.setProductName(productName);
DataReplicaLocationModel replicaLocationModel1 = new DataReplicaLocationModel();
replicaLocationModel1.setFilePath("/path/to/file.dat");
replicaLocationModel1.setReplicaDescription("Description of replica");
replicaLocationModel1.setReplicaLocationCategory(ReplicaLocationCategory.GATEWAY_DATA_STORE);
replicaLocationModel1.setReplicaName("file.dat");
replicaLocationModel1.setStorageResourceId("storage_resource_id");
replicaLocationModel1.setReplicaPersistentType(ReplicaPersistentType.PERSISTENT);
testDataProductModel1.addToReplicaLocations(replicaLocationModel1);
String productUri1 = dataProductRepository.registerDataProduct(testDataProductModel1);
assertTrue(dataProductRepository.isDataProductExists(productUri1));
DataProductModel retrievedDataProductModel1 = dataProductRepository.getDataProduct(productUri1);
assertEquals(productUri1, retrievedDataProductModel1.getProductUri());
assertEquals(1, retrievedDataProductModel1.getReplicaLocationsSize());
DataReplicaLocationModel retrievedReplicaLocationModel1 = retrievedDataProductModel1.getReplicaLocations().get(0);
assertEquals(productUri1, retrievedReplicaLocationModel1.getProductUri());
// validUntilTime has a default value
assertEquals(0, retrievedReplicaLocationModel1.getValidUntilTime());
dataProductRepository.removeDataProduct(productUri1);
assertFalse(dataProductRepository.isDataProductExists(productUri1));
}
}
| 813 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepositoryTest.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
import org.apache.airavata.model.application.io.DataType;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.model.application.io.OutputDataObjectType;
import org.apache.airavata.model.parallelism.ApplicationParallelismType;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.MessageFormat;
import java.util.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class ApplicationInterfaceRepositoryTest extends TestBase {
private static final Logger logger = LoggerFactory.getLogger(ApplicationInterfaceRepositoryTest.class);
private ApplicationInterfaceRepository applicationInterfaceRepository;
private ComputeResourceRepository computeResourceRepository;
private ApplicationDeploymentRepository applicationDeploymentRepository;
private String gatewayId = "testGateway";
/** Binds the test run to the app-catalog database and creates the repositories under test. */
public ApplicationInterfaceRepositoryTest() {
    super(TestBase.Database.APP_CATALOG);
    applicationInterfaceRepository = new ApplicationInterfaceRepository();
    applicationDeploymentRepository = new ApplicationDeploymentRepository();
    computeResourceRepository = new ComputeResourceRepository();
}
/** Verifies a fully-populated application module can be stored and read back unchanged. */
@Test
public void addApplicationModuleTest() throws AppCatalogException {
    ApplicationModule module = new ApplicationModule();
    module.setAppModuleId("appMod1");
    module.setAppModuleName("appMod1Name");
    module.setAppModuleDescription("Description");
    module.setAppModuleVersion("Version1");

    String moduleId = applicationInterfaceRepository.addApplicationModule(module, gatewayId);
    ApplicationModule persisted = applicationInterfaceRepository.getApplicationModule(moduleId);

    // Field-by-field reflective comparison of the stored copy against the original.
    Assert.assertTrue(EqualsBuilder.reflectionEquals(module, persisted));
}
/**
 * When no module id is supplied, the repository must generate one derived from the
 * module name (name prefix plus a generated suffix) rather than reusing the raw name.
 */
@Test
public void addApplicationModuleWithEmptyIdTest() throws AppCatalogException {
    ApplicationModule module = new ApplicationModule();
    module.setAppModuleName("appMod1Name");
    module.setAppModuleDescription("Description");
    module.setAppModuleVersion("Version1");

    String generatedId = applicationInterfaceRepository.addApplicationModule(module, gatewayId);
    ApplicationModule persisted = applicationInterfaceRepository.getApplicationModule(generatedId);

    Assert.assertNotEquals(module.getAppModuleName(), persisted.getAppModuleId());
    Assert.assertTrue(persisted.getAppModuleId().startsWith(module.getAppModuleName()));
}
/**
 * Verifies the add → get → remove lifecycle of an application module.
 */
@Test
public void deleteApplicationModuleTest() throws AppCatalogException {
    // Precondition: the module must not exist before it is added.
    Assert.assertNull(applicationInterfaceRepository.getApplicationModule("appMod1"));

    ApplicationModule applicationModule = new ApplicationModule();
    applicationModule.setAppModuleId("appMod1");
    applicationModule.setAppModuleName("appMod1Name");
    String moduleId = applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
    Assert.assertNotNull(applicationInterfaceRepository.getApplicationModule(moduleId));

    // Use the id returned by the repository rather than the hard-coded literal, so the
    // test stays valid even if the repository ever normalizes the supplied id.
    Assert.assertTrue(applicationInterfaceRepository.removeApplicationModule(moduleId));
    Assert.assertNull(applicationInterfaceRepository.getApplicationModule(moduleId));
}
/**
 * Stores a module, mutates every updatable field on the retrieved copy, updates it,
 * and verifies the update is persisted.
 */
@Test
public void updateApplicationModuleTest() throws AppCatalogException {
    ApplicationModule applicationModule = new ApplicationModule();
    applicationModule.setAppModuleId("appMod1");
    applicationModule.setAppModuleName("appMod1Name");
    applicationModule.setAppModuleDescription("Description");
    applicationModule.setAppModuleVersion("Version1");
    String moduleId = applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);

    ApplicationModule savedAppModule = applicationInterfaceRepository.getApplicationModule(moduleId);
    Assert.assertTrue(EqualsBuilder.reflectionEquals(applicationModule, savedAppModule));

    savedAppModule.setAppModuleName("Updated Name");
    savedAppModule.setAppModuleDescription("Updated Description");
    savedAppModule.setAppModuleVersion("new version");
    // Use the id the repository returned instead of re-hard-coding "appMod1".
    applicationInterfaceRepository.updateApplicationModule(moduleId, savedAppModule);

    ApplicationModule updatedAppModule = applicationInterfaceRepository.getApplicationModule(moduleId);
    Assert.assertTrue(EqualsBuilder.reflectionEquals(savedAppModule, updatedAppModule));
}
/**
 * Stores an application interface with an explicit id and verifies that the id is
 * preserved and the stored copy matches field-for-field.
 */
@Test
public void addApplicationInterfaceTest() throws AppCatalogException {
    ApplicationInterfaceDescription iface = new ApplicationInterfaceDescription();
    iface.setApplicationInterfaceId("interface1");
    iface.setApplicationName("app interface 1");
    iface.setApplicationModules(new ArrayList<>());
    iface.setApplicationInputs(new ArrayList<>());
    iface.setApplicationOutputs(new ArrayList<>());

    String interfaceId = applicationInterfaceRepository.addApplicationInterface(iface, gatewayId);
    assertEquals(iface.getApplicationInterfaceId(), interfaceId);

    ApplicationInterfaceDescription persisted = applicationInterfaceRepository.getApplicationInterface(interfaceId);
    // Skip the Thrift-generated bitfield when comparing reflectively.
    Assert.assertTrue(EqualsBuilder.reflectionEquals(iface, persisted, "__isset_bitfield"));
}
// Verifies that when no interface id is supplied, the repository derives one from the
// application name ("app interface 1" -> an id starting with "app_interface_1").
@Test
public void addApplicationInterfaceWithDefaultIdTest() throws AppCatalogException {
ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
// Deliberately no setApplicationInterfaceId(...) call - the id must be generated.
applicationInterfaceDescription.setApplicationName("app interface 1");
applicationInterfaceDescription.setApplicationModules(new ArrayList<>());
// One fully-specified input, to exercise the input persistence path during the add.
InputDataObjectType input = new InputDataObjectType();
input.setName("input1");
input.setApplicationArgument("Arg");
input.setDataStaged(true);
input.setInputOrder(0);
input.setIsReadOnly(true);
input.setIsRequired(true);
input.setRequiredToAddedToCommandLine(true);
input.setType(DataType.FLOAT);
input.setUserFriendlyDescription("User friendly description");
input.setValue("113");
input.setMetaData("Metadata");
input.setStandardInput(true);
applicationInterfaceDescription.setApplicationInputs(Collections.singletonList(input));
applicationInterfaceDescription.setApplicationOutputs(new ArrayList<>());
String interfaceId = applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
// The generated id is name-derived with spaces replaced; only the prefix is stable.
assertTrue(MessageFormat.format("{0} does not start with {1}", interfaceId, "app_interface_1"),
interfaceId.startsWith("app_interface_1"));
}
/**
 * Verifies the add → get → remove lifecycle of an application interface that carries
 * one fully-populated input and one fully-populated output.
 */
@Test
public void deleteApplicationInterfaceTest() throws AppCatalogException {
    // Precondition: the *interface* must not exist yet. (Previously this queried
    // getApplicationModule("interface1"), which trivially passed for any state.)
    Assert.assertNull(applicationInterfaceRepository.getApplicationInterface("interface1"));

    ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
    applicationInterfaceDescription.setApplicationInterfaceId("interface1");
    applicationInterfaceDescription.setApplicationName("app interface 1");
    applicationInterfaceDescription.setApplicationModules(new ArrayList<>());
    applicationInterfaceDescription.setApplicationInputs(new ArrayList<>());
    applicationInterfaceDescription.setApplicationOutputs(new ArrayList<>());

    // One fully-specified input ...
    InputDataObjectType input = new InputDataObjectType();
    input.setName("input1");
    input.setApplicationArgument("Arg");
    input.setDataStaged(true);
    input.setInputOrder(0);
    input.setIsReadOnly(true);
    input.setIsRequired(true);
    input.setRequiredToAddedToCommandLine(true);
    input.setType(DataType.FLOAT);
    input.setUserFriendlyDescription("User friendly description");
    input.setValue("113");
    input.setMetaData("Metadata");
    input.setStandardInput(true);
    applicationInterfaceDescription.addToApplicationInputs(input);

    // ... and one fully-specified output.
    OutputDataObjectType output = new OutputDataObjectType();
    output.setName("output1");
    output.setValue("value");
    output.setType(DataType.FLOAT);
    output.setApplicationArgument("Argument");
    output.setDataMovement(true);
    output.setIsRequired(true);
    output.setLocation("/home/");
    output.setSearchQuery("Search query");
    output.setRequiredToAddedToCommandLine(true);
    output.setOutputStreaming(true);
    applicationInterfaceDescription.addToApplicationOutputs(output);

    String interfaceId = applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
    Assert.assertNotNull(applicationInterfaceRepository.getApplicationInterface(interfaceId));
    Assert.assertTrue(applicationInterfaceRepository.removeApplicationInterface(interfaceId));
    Assert.assertNull(applicationInterfaceRepository.getApplicationInterface(interfaceId));
}
/**
 * Maps two modules onto one interface and verifies the mapping order is preserved.
 */
@Test
public void addModulesToInterfaceTest() throws AppCatalogException {
    ApplicationModule applicationModule1 = new ApplicationModule();
    applicationModule1.setAppModuleId("appMod1");
    applicationModule1.setAppModuleName("appMod1Name");
    String moduleId1 = applicationInterfaceRepository.addApplicationModule(applicationModule1, gatewayId);

    ApplicationModule applicationModule2 = new ApplicationModule();
    applicationModule2.setAppModuleId("appMod2");
    applicationModule2.setAppModuleName("appMod2Name");
    String moduleId2 = applicationInterfaceRepository.addApplicationModule(applicationModule2, gatewayId);

    ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
    applicationInterfaceDescription.setApplicationInterfaceId("interface1");
    applicationInterfaceDescription.setApplicationName("app interface 1");
    String interfaceId = applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);

    applicationInterfaceRepository.addApplicationModuleMapping(moduleId1, interfaceId);
    applicationInterfaceRepository.addApplicationModuleMapping(moduleId2, interfaceId);

    ApplicationInterfaceDescription savedInterface = applicationInterfaceRepository.getApplicationInterface(interfaceId);
    // JUnit convention: expected value first, actual value second.
    Assert.assertEquals(applicationModule1.getAppModuleId(), savedInterface.getApplicationModules().get(0));
    Assert.assertEquals(applicationModule2.getAppModuleId(), savedInterface.getApplicationModules().get(1));
}
// Creates a bare interface, then updates it with one fully-specified input and one
// fully-specified output, and verifies both survive the round trip via the interface
// getter as well as the dedicated getApplicationInputs/getApplicationOutputs queries.
@Test
public void addInputsOutputsToInterfaceTest() throws AppCatalogException {
ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
applicationInterfaceDescription.setApplicationInterfaceId("interface1");
applicationInterfaceDescription.setApplicationName("app interface 1");
String interfaceId = applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
// Every settable field of the input is populated so the reflective equality check
// below exercises the full persistence mapping.
InputDataObjectType input = new InputDataObjectType();
input.setName("input1");
input.setApplicationArgument("Arg");
input.setDataStaged(true);
input.setInputOrder(0);
input.setIsReadOnly(true);
input.setIsRequired(true);
input.setRequiredToAddedToCommandLine(true);
input.setType(DataType.FLOAT);
input.setUserFriendlyDescription("User friendly description");
input.setValue("113");
input.setMetaData("Metadata");
input.setStandardInput(true);
// TODO missing field
//input.setStorageResourceId("Storage resource id");
OutputDataObjectType output = new OutputDataObjectType();
output.setName("output1");
output.setValue("value");
output.setType(DataType.FLOAT);
output.setApplicationArgument("Argument");
output.setDataMovement(true);
output.setIsRequired(true);
output.setLocation("/home/");
output.setSearchQuery("Search query");
output.setRequiredToAddedToCommandLine(true);
output.setOutputStreaming(true);
output.setMetaData("outputMetaData");
// TODO missing field
//output.setStorageResourceId("Storage resource id");
applicationInterfaceDescription.setApplicationInputs(Collections.singletonList(input));
applicationInterfaceDescription.setApplicationOutputs(Collections.singletonList(output));
applicationInterfaceRepository.updateApplicationInterface(interfaceId, applicationInterfaceDescription);
// Round trip 1: inputs/outputs retrieved through the whole interface.
ApplicationInterfaceDescription savedInterface = applicationInterfaceRepository.getApplicationInterface(interfaceId);
Assert.assertEquals(1, savedInterface.getApplicationInputsSize());
Assert.assertEquals(1, savedInterface.getApplicationOutputsSize());
// "__isset_bitfield" is Thrift-generated bookkeeping and is excluded from comparison.
Assert.assertTrue(EqualsBuilder.reflectionEquals(input, savedInterface.getApplicationInputs().get(0), "__isset_bitfield"));
Assert.assertTrue(EqualsBuilder.reflectionEquals(output, savedInterface.getApplicationOutputs().get(0), "__isset_bitfield"));
// Round trip 2: inputs/outputs retrieved through the dedicated query methods.
List<InputDataObjectType> savedInputs = applicationInterfaceRepository.getApplicationInputs(interfaceId);
List<OutputDataObjectType> savedOutputs = applicationInterfaceRepository.getApplicationOutputs(interfaceId);
Assert.assertEquals(1, savedInputs.size());
Assert.assertEquals(1, savedOutputs.size());
Assert.assertTrue(EqualsBuilder.reflectionEquals(input, savedInputs.get(0), "__isset_bitfield"));
Assert.assertTrue(EqualsBuilder.reflectionEquals(output, savedOutputs.get(0), "__isset_bitfield"));
}
// Updates an interface to hold two inputs and two outputs, then updates it again with
// only one of each, verifying that the update removes the dropped input/output rows.
@Test
public void addAndRemoveInputsOutputsToInterfaceTest() throws AppCatalogException {
ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
applicationInterfaceDescription.setApplicationInterfaceId("interface1");
applicationInterfaceDescription.setApplicationName("app interface 1");
String interfaceId = applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gatewayId);
// First input: fully specified.
InputDataObjectType input = new InputDataObjectType();
input.setName("input1");
input.setApplicationArgument("Arg");
input.setDataStaged(true);
input.setInputOrder(0);
input.setIsReadOnly(true);
input.setIsRequired(true);
input.setRequiredToAddedToCommandLine(true);
input.setType(DataType.FLOAT);
input.setUserFriendlyDescription("User friendly description");
input.setValue("113");
input.setMetaData("Metadata");
input.setStandardInput(true);
// Second input: minimal - only name and order - and later removed by the second update.
InputDataObjectType input2 = new InputDataObjectType();
input2.setName("input2");
input2.setInputOrder(1);
// First output: fully specified.
OutputDataObjectType output = new OutputDataObjectType();
output.setName("output1");
output.setValue("value");
output.setType(DataType.FLOAT);
output.setApplicationArgument("Argument");
output.setDataMovement(true);
output.setIsRequired(true);
output.setLocation("/home/");
output.setSearchQuery("Search query");
output.setRequiredToAddedToCommandLine(true);
output.setOutputStreaming(true);
// Second output: minimal, later removed by the second update.
OutputDataObjectType output2 = new OutputDataObjectType();
output2.setName("output2");
applicationInterfaceDescription.setApplicationInputs(Arrays.asList(input, input2));
applicationInterfaceDescription.setApplicationOutputs(Arrays.asList(output, output2));
applicationInterfaceRepository.updateApplicationInterface(interfaceId, applicationInterfaceDescription);
ApplicationInterfaceDescription savedInterface = applicationInterfaceRepository.getApplicationInterface(interfaceId);
Assert.assertEquals(2, savedInterface.getApplicationInputsSize());
Assert.assertEquals(2, savedInterface.getApplicationOutputsSize());
// Second update keeps only the first input/output; the others must be deleted.
savedInterface.setApplicationInputs(Arrays.asList(input));
savedInterface.setApplicationOutputs(Arrays.asList(output));
applicationInterfaceRepository.updateApplicationInterface(interfaceId, savedInterface);
ApplicationInterfaceDescription updatedInterface = applicationInterfaceRepository.getApplicationInterface(interfaceId);
Assert.assertEquals(1, updatedInterface.getApplicationInputsSize());
Assert.assertEquals(1, updatedInterface.getApplicationOutputsSize());
}
/**
 * Registers five interfaces and checks that filtering by application name returns the
 * matching interface for each of them.
 */
@Test
public void filterApplicationInterfacesTest() throws AppCatalogException {
    List<ApplicationInterfaceDescription> interfaces = new ArrayList<>();
    for (int index = 0; index < 5; index++) {
        ApplicationInterfaceDescription description = new ApplicationInterfaceDescription();
        description.setApplicationInterfaceId("interface" + index);
        description.setApplicationName("app interface " + index);
        applicationInterfaceRepository.addApplicationInterface(description, gatewayId);
        interfaces.add(description);
    }
    for (ApplicationInterfaceDescription expected : interfaces) {
        Map<String, String> filters = new HashMap<>();
        filters.put(DBConstants.ApplicationInterface.APPLICATION_NAME, expected.getApplicationName());
        List<ApplicationInterfaceDescription> matches = applicationInterfaceRepository.getApplicationInterfaces(filters);
        assertEquals(expected.getApplicationName(), matches.get(0).getApplicationName());
    }
}
/**
 * Registers five modules that share a common name and checks that filtering by module
 * name finds a matching module for each registration.
 */
@Test
public void filterApplicationModulesTest() throws AppCatalogException {
    List<ApplicationModule> modules = new ArrayList<>();
    for (int index = 0; index < 5; index++) {
        ApplicationModule module = new ApplicationModule();
        module.setAppModuleId("appMod" + index);
        module.setAppModuleName("appMod1Name");
        module.setAppModuleDescription("Description");
        module.setAppModuleVersion("Version1");
        applicationInterfaceRepository.addApplicationModule(module, gatewayId);
        modules.add(module);
    }
    for (ApplicationModule expected : modules) {
        Map<String, String> filters = new HashMap<>();
        filters.put(DBConstants.ApplicationModule.APPLICATION_MODULE_NAME, expected.getAppModuleName());
        List<ApplicationModule> matches = applicationInterfaceRepository.getApplicationModules(filters);
        assertEquals(expected.getAppModuleName(), matches.get(0).getAppModuleName());
    }
}
/**
 * Filtering modules by an unsupported key must be rejected with
 * {@link IllegalArgumentException}.
 */
@Test
public void filterModuleByWrongCategoryTest() throws AppCatalogException {
    ApplicationModule module = new ApplicationModule();
    module.setAppModuleId("appMod1");
    module.setAppModuleName("appMod1Name");
    module.setAppModuleDescription("Description");
    module.setAppModuleVersion("Version1");
    applicationInterfaceRepository.addApplicationModule(module, gatewayId);

    Map<String, String> filters = new HashMap<>();
    filters.put("INVALID KEY", module.getAppModuleName());
    try {
        applicationInterfaceRepository.getApplicationModules(filters).get(0).getAppModuleName();
        Assert.fail("Expected to throw an exception");
    } catch (IllegalArgumentException e) {
        // expected: the repository rejects unknown filter keys
    }
}
/**
 * Filtering interfaces by an unsupported key must be rejected with
 * {@link IllegalArgumentException}.
 */
@Test
public void filterInterfaceByWrongCategoryTest() throws AppCatalogException {
    ApplicationInterfaceDescription description = new ApplicationInterfaceDescription();
    description.setApplicationInterfaceId("interface1");
    description.setApplicationName("app interface");
    applicationInterfaceRepository.addApplicationInterface(description, gatewayId);

    Map<String, String> filters = new HashMap<>();
    filters.put("INVALID KEY", description.getApplicationName());
    try {
        applicationInterfaceRepository.getApplicationInterfaces(filters).get(0).getApplicationName();
        Assert.fail("Expected to throw an exception");
    } catch (IllegalArgumentException e) {
        // expected: the repository rejects unknown filter keys
    }
}
/**
 * getAccessibleApplicationModules must return only modules that have a deployment in
 * BOTH the accessible-deployment list and the accessible-compute-host list, without
 * duplicating a module that matches via several deployments.
 */
@Test
public void getAccessibleApplicationModulesTest() throws AppCatalogException {
    ComputeResourceDescription computeResourceDescription1 = new ComputeResourceDescription();
    computeResourceDescription1.setComputeResourceId("compHost1");
    computeResourceDescription1.setHostName("compHost1Name");
    String computeResourceId1 = computeResourceRepository.addComputeResource(computeResourceDescription1);

    ComputeResourceDescription computeResourceDescription2 = new ComputeResourceDescription();
    computeResourceDescription2.setComputeResourceId("compHost2");
    computeResourceDescription2.setHostName("compHost2Name");
    String computeResourceId2 = computeResourceRepository.addComputeResource(computeResourceDescription2);

    ApplicationModule applicationModule1 = new ApplicationModule();
    applicationModule1.setAppModuleId("appMod1");
    applicationModule1.setAppModuleName("appMod1Name");
    String moduleId1 = applicationInterfaceRepository.addApplicationModule(applicationModule1, gatewayId);

    // appMod2 never gets a deployment, so it must never appear in the results below.
    ApplicationModule applicationModule2 = new ApplicationModule();
    applicationModule2.setAppModuleId("appMod2");
    applicationModule2.setAppModuleName("appMod2Name");
    applicationInterfaceRepository.addApplicationModule(applicationModule2, gatewayId);

    // Two deployments of appMod1, one per compute host.
    ApplicationDeploymentDescription applicationDeploymentDescription1 = new ApplicationDeploymentDescription();
    applicationDeploymentDescription1.setAppDeploymentId("appDep1");
    applicationDeploymentDescription1.setAppModuleId(moduleId1);
    applicationDeploymentDescription1.setComputeHostId(computeResourceId1);
    applicationDeploymentDescription1.setExecutablePath("executablePath");
    applicationDeploymentDescription1.setParallelism(ApplicationParallelismType.SERIAL);
    String deploymentId1 = applicationDeploymentRepository.addApplicationDeployment(applicationDeploymentDescription1, gatewayId);

    ApplicationDeploymentDescription applicationDeploymentDescription2 = new ApplicationDeploymentDescription();
    applicationDeploymentDescription2.setAppDeploymentId("appDep2");
    applicationDeploymentDescription2.setAppModuleId(moduleId1);
    applicationDeploymentDescription2.setComputeHostId(computeResourceId2);
    applicationDeploymentDescription2.setExecutablePath("executablePath");
    applicationDeploymentDescription2.setParallelism(ApplicationParallelismType.SERIAL);
    String deploymentId2 = applicationDeploymentRepository.addApplicationDeployment(applicationDeploymentDescription2, gatewayId);

    // Deployment 1 and host 1 intersect -> appMod1 is accessible.
    List<ApplicationModule> appModuleList = applicationInterfaceRepository.getAccessibleApplicationModules(
            gatewayId, Arrays.asList(deploymentId1), Arrays.asList(computeResourceId1));
    assertEquals(1, appModuleList.size());
    assertEquals(moduleId1, appModuleList.get(0).getAppModuleId());

    // Deployment 1 lives on host 1, so pairing it with host 2 yields nothing.
    appModuleList = applicationInterfaceRepository.getAccessibleApplicationModules(
            gatewayId, Arrays.asList(deploymentId1), Arrays.asList(computeResourceId2));
    assertEquals(0, appModuleList.size());

    // Deployment 2 and host 2 intersect -> appMod1 is accessible.
    appModuleList = applicationInterfaceRepository.getAccessibleApplicationModules(
            gatewayId, Arrays.asList(deploymentId2), Arrays.asList(computeResourceId2));
    assertEquals(1, appModuleList.size());
    assertEquals(moduleId1, appModuleList.get(0).getAppModuleId());

    // Everything accessible -> still exactly one module (no duplicate for two deployments).
    appModuleList = applicationInterfaceRepository.getAccessibleApplicationModules(
            gatewayId, Arrays.asList(deploymentId1, deploymentId2), Arrays.asList(computeResourceId1, computeResourceId2));
    assertEquals(1, appModuleList.size());
    assertEquals(moduleId1, appModuleList.get(0).getAppModuleId());
}
// Registers five modules in each of five distinct gateways, then verifies that
// getAllApplicationModules(gateway) returns exactly that gateway's modules, in
// registration order and field-for-field equal.
@Test
public void getAllApplicationModulesByGatewayTest() throws AppCatalogException {
// moduleStore: gateway id -> modules registered under that gateway, in order.
Map<String, List<ApplicationModule>> moduleStore = new HashMap<>();
for (int j = 0; j < 5; j++) {
List<ApplicationModule> modules = new ArrayList<>();
String gateway = "gateway" + j;
for (int i = 0; i < 5; i++) {
ApplicationModule applicationModule = new ApplicationModule();
// Ids/fields are prefixed with the gateway name so modules are unique per gateway.
applicationModule.setAppModuleId(gateway + "appMod" + i);
applicationModule.setAppModuleName(gateway + "appMod1Name");
applicationModule.setAppModuleDescription(gateway + "Description");
applicationModule.setAppModuleVersion(gateway + "Version1");
modules.add(applicationModule);
applicationInterfaceRepository.addApplicationModule(applicationModule, gateway);
}
moduleStore.put(gateway, modules);
}
for (int j = 0; j < 5; j++) {
String gateway = "gateway" + j;
List<ApplicationModule> allApplicationModules = applicationInterfaceRepository.getAllApplicationModules(gateway);
Assert.assertEquals(moduleStore.get(gateway).size(), allApplicationModules.size());
for (int i = 0; i < allApplicationModules.size(); i++) {
// "__isset_bitfield" is Thrift-generated bookkeeping and is excluded from comparison.
Assert.assertTrue(EqualsBuilder.reflectionEquals(moduleStore.get(gateway).get(i), allApplicationModules.get(i), "__isset_bitfield"));
}
}
}
@Test
public void getAllApplicationInterfacesByGatewayTest() throws AppCatalogException {
Map<String, List<ApplicationInterfaceDescription>> interfaceStore = new HashMap<>();
for (int j = 0; j < 5; j++) {
List<ApplicationInterfaceDescription> interfaces = new ArrayList<>();
String gateway = "gateway" + j;
for (int i = 0; i < 5; i++) {
ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
applicationInterfaceDescription.setApplicationInterfaceId(gateway + "interface" + i);
applicationInterfaceDescription.setApplicationName(gateway + "app interface " + i);
applicationInterfaceDescription.setApplicationModules(new ArrayList<>());
applicationInterfaceDescription.setApplicationInputs(new ArrayList<>());
applicationInterfaceDescription.setApplicationOutputs(new ArrayList<>());
interfaces.add(applicationInterfaceDescription);
applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gateway);
}
interfaceStore.put(gateway, interfaces);
}
for (int j = 0; j < 5; j++) {
String gateway = "gateway" + j;
List<ApplicationInterfaceDescription> allApplicationInterfaces = applicationInterfaceRepository.getAllApplicationInterfaces(gateway);
Assert.assertEquals(interfaceStore.get(gateway).size(), allApplicationInterfaces.size());
for (int i = 0; i < allApplicationInterfaces.size(); i++) {
Assert.assertTrue(EqualsBuilder.reflectionEquals(interfaceStore.get(gateway).get(i), allApplicationInterfaces.get(i), "__isset_bitfield"));
}
}
}
@Test
public void getAllApplicationInterfacesWithoutGatewayTest() throws AppCatalogException {
List<ApplicationInterfaceDescription> interfaces = new ArrayList<>();
for (int j = 0; j < 5; j++) {
String gateway = "gateway" + j;
for (int i = 0; i < 5; i++) {
ApplicationInterfaceDescription applicationInterfaceDescription = new ApplicationInterfaceDescription();
applicationInterfaceDescription.setApplicationInterfaceId(gateway + "interface" + i);
applicationInterfaceDescription.setApplicationName(gateway + "app interface " + i);
applicationInterfaceDescription.setApplicationModules(new ArrayList<>());
applicationInterfaceDescription.setApplicationInputs(new ArrayList<>());
applicationInterfaceDescription.setApplicationOutputs(new ArrayList<>());
interfaces.add(applicationInterfaceDescription);
applicationInterfaceRepository.addApplicationInterface(applicationInterfaceDescription, gateway);
}
}
List<String> allApplicationInterfaceIds = applicationInterfaceRepository.getAllApplicationInterfaceIds();
Assert.assertEquals(interfaces.size(), allApplicationInterfaceIds.size());
for (int i = 0; i < interfaces.size(); i++) {
Assert.assertEquals(interfaces.get(i).getApplicationInterfaceId(), allApplicationInterfaceIds.get(i));
}
}
}
| 814 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/StorageResourceRepositoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
import org.apache.airavata.model.data.movement.DataMovementInterface;
import org.apache.airavata.model.data.movement.DataMovementProtocol;
import org.apache.airavata.model.data.movement.GridFTPDataMovement;
import org.apache.airavata.model.data.movement.SCPDataMovement;
import org.apache.airavata.model.data.movement.SecurityProtocol;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Created by skariyat on 3/13/18.
 *
 * Exercises {@link StorageResourceRepository}: add a storage resource with two
 * data-movement interfaces, read it back, update it, and verify persistence.
 */
public class StorageResourceRepositoryTest extends TestBase {
    // Was LoggerFactory.getLogger(StorageResourceRepository.class): log under the
    // test class itself, not the repository under test.
    private static final Logger logger = LoggerFactory.getLogger(StorageResourceRepositoryTest.class);
    private StorageResourceRepository storageResourceRepository;
    public StorageResourceRepositoryTest() {
        super(Database.APP_CATALOG);
        storageResourceRepository = new StorageResourceRepository();
    }
    /**
     * Round-trips a StorageResourceDescription: add with SCP + GridFTP movement
     * interfaces, read back and compare, then update the host name and verify
     * the update is persisted.
     */
    @Test
    public void StorageResourceRepositoryTest() throws AppCatalogException {
        StorageResourceDescription description = new StorageResourceDescription();
        description.setHostName("localhost");
        description.setEnabled(true);
        description.setStorageResourceDescription("testDescription");
        String scpDataMoveId = addSCPDataMovement();
        System.out.println("**** SCP DataMoveId****** :" + scpDataMoveId);
        String gridFTPDataMoveId = addGridFTPDataMovement();
        System.out.println("**** grid FTP DataMoveId****** :" + gridFTPDataMoveId);
        // Attach both movement interfaces, SCP first by priority.
        List<DataMovementInterface> dataMovementInterfaces = new ArrayList<DataMovementInterface>();
        DataMovementInterface scpInterface = new DataMovementInterface();
        scpInterface.setDataMovementInterfaceId(scpDataMoveId);
        scpInterface.setDataMovementProtocol(DataMovementProtocol.SCP);
        scpInterface.setPriorityOrder(1);
        DataMovementInterface gridFTPMv = new DataMovementInterface();
        gridFTPMv.setDataMovementInterfaceId(gridFTPDataMoveId);
        gridFTPMv.setDataMovementProtocol(DataMovementProtocol.GridFTP);
        gridFTPMv.setPriorityOrder(2);
        dataMovementInterfaces.add(scpInterface);
        dataMovementInterfaces.add(gridFTPMv);
        description.setDataMovementInterfaces(dataMovementInterfaces);
        String resourceId = storageResourceRepository.addStorageResource(description);
        StorageResourceDescription storageResourceDescription = null;
        if (storageResourceRepository.isExists(resourceId)) {
            storageResourceDescription = storageResourceRepository.getStorageResource(resourceId);
            assertEquals("localhost", storageResourceDescription.getHostName());
            assertEquals("testDescription", storageResourceDescription.getStorageResourceDescription());
            List<DataMovementInterface> movementInterfaces = storageResourceDescription.getDataMovementInterfaces();
            if (movementInterfaces != null && !movementInterfaces.isEmpty()){
                for (DataMovementInterface dataMovementInterface : movementInterfaces){
                    System.out.println("Data Movement Interface Id :" + dataMovementInterface.getDataMovementInterfaceId());
                    System.out.println("Data Movement Protocol :" + dataMovementInterface.getDataMovementProtocol().toString());
                }
            }
        } else {
            fail("Created Storage Resource not found");
        }
        // Update the host name and verify the change round-trips.
        description.setHostName("localhost2");
        storageResourceRepository.updateStorageResource(resourceId, description);
        if (storageResourceRepository.isStorageResourceExists(resourceId)) {
            storageResourceDescription = storageResourceRepository.getStorageResource(resourceId);
            System.out.println("**********Updated Resource name ************* : " + storageResourceDescription.getHostName());
            assertEquals("localhost2", storageResourceDescription.getHostName());
        }
        assertNotNull("Storage resource save successfully", storageResourceDescription);
    }
    /**
     * Persists an SCP data movement (SSH keys, port 22) and returns its id,
     * or null if the catalog rejects it (error is logged, not rethrown).
     */
    public String addSCPDataMovement (){
        try {
            SCPDataMovement dataMovement = new SCPDataMovement();
            dataMovement.setSshPort(22);
            dataMovement.setSecurityProtocol(SecurityProtocol.SSH_KEYS);
            return new ComputeResourceRepository().addScpDataMovement(dataMovement);
        } catch (AppCatalogException e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }
    /**
     * Persists a GridFTP data movement with two endpoints and returns its id,
     * or null if the catalog rejects it (error is logged, not rethrown).
     */
    public String addGridFTPDataMovement (){
        try {
            GridFTPDataMovement dataMovement = new GridFTPDataMovement();
            dataMovement.setSecurityProtocol(SecurityProtocol.SSH_KEYS);
            List<String> endPoints = new ArrayList<String>();
            endPoints.add("222.33.43.444");
            endPoints.add("23.344.44.454");
            dataMovement.setGridFTPEndPoints(endPoints);
            return new ComputeResourceRepository().addGridFTPDataMovement(dataMovement);
        } catch (AppCatalogException e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }
}
| 815 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourceRepositoryTest.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.computeresource.*;
import org.apache.airavata.model.data.movement.*;
import org.apache.airavata.model.parallelism.ApplicationParallelismType;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
public class ComputeResourceRepositoryTest extends TestBase {
    private static final Logger logger = LoggerFactory.getLogger(ComputeResourceRepositoryTest.class);
    // Repository under test; backed by the APP_CATALOG test database provisioned by TestBase.
    private ComputeResourceRepository computeResourceRepository;
    // Selects the app-catalog schema for TestBase and instantiates the repository under test.
    public ComputeResourceRepositoryTest() {
        super(Database.APP_CATALOG);
        computeResourceRepository = new ComputeResourceRepository();
    }
@Test
public void removeBatchQueueTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
computeResourceRepository.addResourceJobManager(resourceJobManager);
SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
SCPDataMovement scpDataMovement = prepareScpDataMovement();
String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
String savedComputeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
List<BatchQueue> batchQueues = computeResourceDescription.getBatchQueues();
Assert.assertTrue(batchQueues.size() > 0);
computeResourceRepository.removeBatchQueue(savedComputeResourceId, batchQueues.get(0).getQueueName());
ComputeResourceDescription updatedComputeResource = computeResourceRepository.getComputeResource(savedComputeResourceId);
List<BatchQueue> updatedBatchQueues = updatedComputeResource.getBatchQueues();
Assert.assertEquals(batchQueues.size(), updatedBatchQueues.size() + 1);
Optional<BatchQueue> searchedInterfaceResult = updatedBatchQueues.stream()
.filter(queue -> queue.getQueueName().equals(batchQueues.get(0).getQueueName())).findFirst();
Assert.assertFalse(searchedInterfaceResult.isPresent());
}
@Test
public void removeDataMovementInterfaceTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
computeResourceRepository.addResourceJobManager(resourceJobManager);
SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
SCPDataMovement scpDataMovement = prepareScpDataMovement();
String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
String savedComputeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
List<DataMovementInterface> dataMovementInterfaces = computeResourceDescription.getDataMovementInterfaces();
Assert.assertTrue(dataMovementInterfaces.size() > 0);
computeResourceRepository.removeDataMovementInterface(savedComputeResourceId, dataMovementInterfaces.get(0).getDataMovementInterfaceId());
ComputeResourceDescription updatedComputeResource = computeResourceRepository.getComputeResource(savedComputeResourceId);
List<DataMovementInterface> updatedDataMovementInterfaces = updatedComputeResource.getDataMovementInterfaces();
Assert.assertEquals(dataMovementInterfaces.size(), updatedDataMovementInterfaces.size() + 1);
Optional<DataMovementInterface> searchedInterfaceResult = updatedDataMovementInterfaces.stream()
.filter(iface -> iface.getDataMovementInterfaceId().equals(dataMovementInterfaces.get(0).getDataMovementInterfaceId())).findFirst();
Assert.assertFalse(searchedInterfaceResult.isPresent());
}
@Test
public void removeJobSubmissionInterfaceTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
computeResourceRepository.addResourceJobManager(resourceJobManager);
SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
SCPDataMovement scpDataMovement = prepareScpDataMovement();
String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
String savedComputeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
List<JobSubmissionInterface> jobSubmissionInterfaces = computeResourceDescription.getJobSubmissionInterfaces();
Assert.assertTrue(jobSubmissionInterfaces.size() > 0);
computeResourceRepository.removeJobSubmissionInterface(savedComputeResourceId, jobSubmissionInterfaces.get(0).getJobSubmissionInterfaceId());
ComputeResourceDescription updatedComputeResource = computeResourceRepository.getComputeResource(savedComputeResourceId);
List<JobSubmissionInterface> updatedJobSubmissionInterfaces = updatedComputeResource.getJobSubmissionInterfaces();
Assert.assertEquals(jobSubmissionInterfaces.size(), updatedJobSubmissionInterfaces.size() + 1);
Optional<JobSubmissionInterface> searchedInterfaceResult = updatedJobSubmissionInterfaces.stream()
.filter(iface -> iface.getJobSubmissionInterfaceId().equals(jobSubmissionInterfaces.get(0).getJobSubmissionInterfaceId())).findFirst();
Assert.assertFalse(searchedInterfaceResult.isPresent());
}
@Test
public void listComputeResourcesTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
computeResourceRepository.addResourceJobManager(resourceJobManager);
SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
SCPDataMovement scpDataMovement = prepareScpDataMovement();
String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
List<String> allIds = new ArrayList<>();
List<ComputeResourceDescription> allComputeResources = new ArrayList<>();
Map<String, String> allComputeResourceMap = new HashMap<>();
for (int i = 0; i < 5; i++) {
ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
computeResourceDescription.setHostName("Host" + i);
computeResourceDescription.setEnabled((i%2 == 0));
String savedId = computeResourceRepository.addComputeResource(computeResourceDescription);
allIds.add(savedId);
allComputeResources.add(computeResourceDescription);
allComputeResourceMap.put(savedId, computeResourceDescription.getHostName());
}
List<ComputeResourceDescription> allSavedComputeResources = computeResourceRepository.getAllComputeResourceList();
Assert.assertEquals(5, allSavedComputeResources.size());
for (int i = 0; i < 5; i++) {
Assert.assertTrue(deepCompareComputeResourceDescription(allComputeResources.get(i), allSavedComputeResources.get(i)));
}
Map<String, String> allSavedComputeResourceIds = computeResourceRepository.getAllComputeResourceIdList();
Assert.assertEquals(5, allSavedComputeResourceIds.size());
for (String id : allIds) {
String host = allSavedComputeResourceIds.get(id);
Assert.assertNotNull(host);
Assert.assertEquals(allComputeResourceMap.get(id), host);
}
Map<String, String> allAvailableIds = computeResourceRepository.getAvailableComputeResourceIdList();
Assert.assertEquals(3, allAvailableIds.size());
Assert.assertNotNull(allAvailableIds.get(allIds.get(0)));
Assert.assertNotNull(allAvailableIds.get(allIds.get(2)));
Assert.assertNotNull(allAvailableIds.get(allIds.get(4)));
}
@Test
public void filterComputeResourcesTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
computeResourceRepository.addResourceJobManager(resourceJobManager);
SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
SCPDataMovement scpDataMovement = prepareScpDataMovement();
String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
Map<String, String> cfilters = new HashMap<String, String>();
cfilters.put(DBConstants.ComputeResource.HOST_NAME, "localhost");
List<ComputeResourceDescription> computeResourceList = computeResourceRepository.getComputeResourceList(cfilters);
Assert.assertEquals(0, computeResourceList.size());
String computeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
computeResourceList = computeResourceRepository.getComputeResourceList(cfilters);
Assert.assertEquals(1, computeResourceList.size());
Assert.assertEquals(computeResourceId, computeResourceList.get(0).getComputeResourceId());
try {
cfilters = new HashMap<String, String>();
cfilters.put("Invalid_filter", "localhost");
computeResourceRepository.getComputeResourceList(cfilters);
Assert.fail();
} catch (Exception e) {
// ignore
}
}
@Test
public void updateComputeResourceTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
computeResourceRepository.addResourceJobManager(resourceJobManager);
SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
SCPDataMovement scpDataMovement = prepareScpDataMovement();
String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
String computeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
ComputeResourceDescription savedComputeResource = computeResourceRepository.getComputeResource(computeResourceId);
savedComputeResource.getHostAliases().add("New Alias");
BatchQueue batchQueue = new BatchQueue();
batchQueue.setQueueName("queue new ");
batchQueue.setQueueDescription("que1Desc new");
batchQueue.setMaxRunTime(16);
batchQueue.setMaxNodes(10);
batchQueue.setMaxProcessors(11);
batchQueue.setMaxJobsInQueue(5);
batchQueue.setMaxMemory(2005);
batchQueue.setCpuPerNode(7);
batchQueue.setDefaultNodeCount(11);
batchQueue.setDefaultCPUCount(3);
batchQueue.setDefaultWalltime(34);
batchQueue.setQueueSpecificMacros("Macros new");
batchQueue.setIsDefaultQueue(true);
savedComputeResource.getBatchQueues().add(batchQueue);
savedComputeResource.setCpusPerNode(43);
savedComputeResource.setDefaultWalltime(4343);
computeResourceRepository.updateComputeResource(computeResourceId, savedComputeResource);
ComputeResourceDescription updatedComputeResource = computeResourceRepository.getComputeResource(computeResourceId);
Assert.assertTrue(deepCompareComputeResourceDescription(savedComputeResource, updatedComputeResource));
}
@Test
public void addComputeResourceTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
computeResourceRepository.addResourceJobManager(resourceJobManager);
SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(resourceJobManager);
String sshSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
SCPDataMovement scpDataMovement = prepareScpDataMovement();
String scpDataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
GridFTPDataMovement gridFTPDataMovement = prepareGridFTPDataMovement("192.156.33.44");
String gridFTPDataMovementId = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement);
ComputeResourceDescription computeResourceDescription = prepareComputeResource(sshSubmissionId, scpDataMovementId, gridFTPDataMovementId, 4);
computeResourceDescription.setComputeResourceId("manually-entered-id");
Assert.assertNull(computeResourceRepository.getComputeResource("manually-entered-id"));
String computeResourceId = computeResourceRepository.addComputeResource(computeResourceDescription);
Assert.assertEquals("manually-entered-id", computeResourceId);
Assert.assertTrue(computeResourceRepository.isComputeResourceExists(computeResourceId));
ComputeResourceDescription savedComputeResource = computeResourceRepository.getComputeResource("manually-entered-id");
Assert.assertNotNull(savedComputeResource);
Assert.assertTrue(deepCompareComputeResourceDescription(computeResourceDescription, savedComputeResource));
}
@Test
public void addResourceJobManagerTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
String jobManagerId = computeResourceRepository.addResourceJobManager(resourceJobManager);
ResourceJobManager savedJobManager = computeResourceRepository.getResourceJobManager(jobManagerId);
Assert.assertTrue(EqualsBuilder.reflectionEquals(resourceJobManager, savedJobManager, "__isset_bitfield"));
}
@Test
public void deleteResourceJobManagerTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
String jobManagerId = computeResourceRepository.addResourceJobManager(resourceJobManager);
Assert.assertNotNull(computeResourceRepository.getResourceJobManager(jobManagerId));
computeResourceRepository.deleteResourceJobManager(jobManagerId);
Assert.assertNull(computeResourceRepository.getResourceJobManager(jobManagerId));
}
@Test
public void updateResourceJobManagerTest() throws AppCatalogException {
ResourceJobManager resourceJobManager = prepareResourceJobManager();
String jobManagerId = computeResourceRepository.addResourceJobManager(resourceJobManager);
ResourceJobManager savedJobManager = computeResourceRepository.getResourceJobManager(jobManagerId);
savedJobManager.setJobManagerBinPath("/new bin");
savedJobManager.getJobManagerCommands().put(JobManagerCommand.SHOW_START, "New Command Value");
savedJobManager.getParallelismPrefix().put(ApplicationParallelismType.MPI, "MPI Type");
computeResourceRepository.updateResourceJobManager(jobManagerId, savedJobManager);
ResourceJobManager updatedJobManager = computeResourceRepository.getResourceJobManager(jobManagerId);
Assert.assertTrue(EqualsBuilder.reflectionEquals(savedJobManager, updatedJobManager, "__isset_bitfield"));
}
@Test
public void addUnicoreJobSubmissionTest() throws AppCatalogException {
UnicoreJobSubmission unicoreJobSubmission = prepareUnicoreJobSubmission();
String savedSubmissionId = computeResourceRepository.addUNICOREJobSubmission(unicoreJobSubmission);
UnicoreJobSubmission savedSubmission = computeResourceRepository.getUNICOREJobSubmission(savedSubmissionId);
Assert.assertTrue(EqualsBuilder.reflectionEquals(unicoreJobSubmission, savedSubmission, "__isset_bitfield"));
}
@Test
public void addCloudJobSubmissionTest() throws AppCatalogException {
CloudJobSubmission cloudJobSubmission = prepareCloudJobSubmission();
String savedSubmissionId = computeResourceRepository.addCloudJobSubmission(cloudJobSubmission);
CloudJobSubmission savedSubmission = computeResourceRepository.getCloudJobSubmission(savedSubmissionId);
Assert.assertTrue(EqualsBuilder.reflectionEquals(cloudJobSubmission, savedSubmission, "__isset_bitfield"));
}
@Test
public void addLocalJobSubmissionTest() throws AppCatalogException {
ResourceJobManager jobManager = prepareResourceJobManager();
computeResourceRepository.addResourceJobManager(jobManager);
LOCALSubmission localSubmission = prepareLocalJobSubmission(jobManager);
String savedSubmissionId = computeResourceRepository.addLocalJobSubmission(localSubmission);
LOCALSubmission savedSubmission = computeResourceRepository.getLocalJobSubmission(savedSubmissionId);
Assert.assertTrue(EqualsBuilder.reflectionEquals(localSubmission, savedSubmission, "__isset_bitfield"));
}
@Test
public void addSSHJobSubmissionTest() throws AppCatalogException {
ResourceJobManager jobManager = prepareResourceJobManager();
computeResourceRepository.addResourceJobManager(jobManager);
SSHJobSubmission sshJobSubmission = prepareSSHJobSubmission(jobManager);
String jobSubmissionId = computeResourceRepository.addSSHJobSubmission(sshJobSubmission);
SSHJobSubmission savedJobSubmission = computeResourceRepository.getSSHJobSubmission(jobSubmissionId);
Assert.assertTrue(EqualsBuilder.reflectionEquals(sshJobSubmission, savedJobSubmission, "__isset_bitfield"));
}
@Test
public void addSCPDataMovementTest() throws AppCatalogException {
SCPDataMovement scpDataMovement = prepareScpDataMovement();
String dataMovementId = computeResourceRepository.addScpDataMovement(scpDataMovement);
SCPDataMovement savedDataMovement = computeResourceRepository.getSCPDataMovement(dataMovementId);
Assert.assertTrue(EqualsBuilder.reflectionEquals(scpDataMovement, savedDataMovement, "__isset_bitfield"));
}
@Test
public void addLocalDataMovementTest() throws AppCatalogException {
LOCALDataMovement localDataMovement = prepareLocalDataMovement();
String dataMovementId = computeResourceRepository.addLocalDataMovement(localDataMovement);
LOCALDataMovement savedDataMovement = computeResourceRepository.getLocalDataMovement(dataMovementId);
Assert.assertTrue(EqualsBuilder.reflectionEquals(localDataMovement, savedDataMovement, "__isset_bitfield"));
}
@Test
public void addUnicoreDataMovementTest() throws AppCatalogException {
UnicoreDataMovement unicoreDataMovement = prepareUnicoreDataMovement();
String dataMovementId = computeResourceRepository.addUnicoreDataMovement(unicoreDataMovement);
UnicoreDataMovement savedDataMovement = computeResourceRepository.getUNICOREDataMovement(dataMovementId);
Assert.assertTrue(EqualsBuilder.reflectionEquals(unicoreDataMovement, savedDataMovement, "__isset_bitfield"));
}
@Test
public void addGridFTPDataMovementTest() throws AppCatalogException {
GridFTPDataMovement gridFTPDataMovement1 = prepareGridFTPDataMovement("222.33.43.444", "23.344.44.454");
String dataMovementId1 = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement1);
GridFTPDataMovement savedDataMovement1 = computeResourceRepository.getGridFTPDataMovement(dataMovementId1);
Assert.assertTrue(EqualsBuilder.reflectionEquals(gridFTPDataMovement1, savedDataMovement1, "__isset_bitfield"));
GridFTPDataMovement gridFTPDataMovement2 = prepareGridFTPDataMovement("222.33.43.445", "23.344.44.400");
String dataMovementId2 = computeResourceRepository.addGridFTPDataMovement(gridFTPDataMovement2);
GridFTPDataMovement savedDataMovement2 = computeResourceRepository.getGridFTPDataMovement(dataMovementId2);
Assert.assertTrue(EqualsBuilder.reflectionEquals(gridFTPDataMovement2, savedDataMovement2, "__isset_bitfield"));
}
    @Test
    public void fetchNotAvailableResourceTest() throws AppCatalogException {
        // Every getter must be null-safe for an unknown id: it returns null (or an
        // empty collection, for getFileSystems) rather than throwing.
        Assert.assertNull(computeResourceRepository.getResourceJobManager("INVALID ID"));
        Assert.assertNull(computeResourceRepository.getComputeResource("INVALID ID"));
        Assert.assertNull(computeResourceRepository.getCloudJobSubmission("INVALID ID"));
        Assert.assertEquals(0, computeResourceRepository.getFileSystems("INVALID ID").size());
        Assert.assertNull(computeResourceRepository.getGridFTPDataMovement("INVALID ID"));
        Assert.assertNull(computeResourceRepository.getLocalDataMovement("INVALID ID"));
        Assert.assertNull(computeResourceRepository.getLocalJobSubmission("INVALID ID"));
        Assert.assertNull(computeResourceRepository.getSCPDataMovement("INVALID ID"));
        Assert.assertNull(computeResourceRepository.getUNICOREDataMovement("INVALID ID"));
    }
/**
 * Builds a fully populated {@code ComputeResourceDescription} fixture with one SSH
 * job submission interface, SCP and GridFTP data movement interfaces, the requested
 * number of batch queues, and HOME/SCRATCH file-system mount points.
 *
 * @param sshSubmissionId   id for the SSH job submission interface
 * @param scpDataMoveId     id for the SCP data movement interface
 * @param gridFTPDataMoveId id for the GridFTP data movement interface
 * @param batchQueueCount   number of synthetic batch queues to attach
 */
private ComputeResourceDescription prepareComputeResource(String sshSubmissionId, String scpDataMoveId,
                                                          String gridFTPDataMoveId, int batchQueueCount) {
    ComputeResourceDescription resource = new ComputeResourceDescription();
    resource.setHostName("localhost");
    resource.setResourceDescription("test compute resource");
    resource.setGatewayUsageReporting(true);

    List<String> addresses = new ArrayList<>();
    addresses.add("222.33.43.444");
    addresses.add("23.344.44.454");
    resource.setIpAddresses(addresses);

    JobSubmissionInterface sshInterface = new JobSubmissionInterface();
    sshInterface.setJobSubmissionInterfaceId(sshSubmissionId);
    sshInterface.setPriorityOrder(1);
    sshInterface.setJobSubmissionProtocol(JobSubmissionProtocol.SSH);
    List<JobSubmissionInterface> submissionInterfaces = new ArrayList<>();
    submissionInterfaces.add(sshInterface);
    resource.setJobSubmissionInterfaces(submissionInterfaces);

    DataMovementInterface scpMove = new DataMovementInterface();
    scpMove.setDataMovementInterfaceId(scpDataMoveId);
    scpMove.setDataMovementProtocol(DataMovementProtocol.SCP);
    scpMove.setPriorityOrder(1);
    DataMovementInterface gridFtpMove = new DataMovementInterface();
    gridFtpMove.setDataMovementInterfaceId(gridFTPDataMoveId);
    gridFtpMove.setDataMovementProtocol(DataMovementProtocol.GridFTP);
    gridFtpMove.setPriorityOrder(2);
    List<DataMovementInterface> movementInterfaces = new ArrayList<>();
    movementInterfaces.add(scpMove);
    movementInterfaces.add(gridFtpMove);
    resource.setDataMovementInterfaces(movementInterfaces);

    List<BatchQueue> queues = new ArrayList<>();
    for (int i = 0; i < batchQueueCount; i++) {
        BatchQueue queue = new BatchQueue();
        queue.setQueueName("queue" + i);
        queue.setQueueDescription("que1Desc" + i);
        queue.setMaxRunTime(10 + i);
        queue.setMaxNodes(4 + i);
        queue.setMaxProcessors(5 + i);
        queue.setMaxJobsInQueue(i);
        queue.setMaxMemory(2000 + i);
        queue.setCpuPerNode(1 + i);
        queue.setDefaultNodeCount(3 + i);
        queue.setDefaultCPUCount(15 + i);
        queue.setDefaultWalltime(2 + i);
        queue.setQueueSpecificMacros("Macros " + i);
        // Only the first queue is flagged as the default.
        queue.setIsDefaultQueue(i == 0);
        queues.add(queue);
    }
    resource.setBatchQueues(queues);

    Map<FileSystems, String> fileSystems = new HashMap<>();
    fileSystems.put(FileSystems.HOME, "/home");
    fileSystems.put(FileSystems.SCRATCH, "/tmp");
    resource.setFileSystems(fileSystems);
    resource.setHostAliases(new ArrayList<>());
    return resource;
}
/**
 * Creates a PBS {@code ResourceJobManager} fixture with a sample parallelism
 * prefix and submission/queue-listing commands.
 */
private ResourceJobManager prepareResourceJobManager() {
    ResourceJobManager manager = new ResourceJobManager();
    manager.setResourceJobManagerType(ResourceJobManagerType.PBS);
    manager.setPushMonitoringEndpoint("monitor ep");
    manager.setJobManagerBinPath("/bin");

    Map<ApplicationParallelismType, String> parallelismPrefixes = new HashMap<>();
    parallelismPrefixes.put(ApplicationParallelismType.CCM, "ccm parallel");
    manager.setParallelismPrefix(parallelismPrefixes);

    Map<JobManagerCommand, String> managerCommands = new HashMap<>();
    managerCommands.put(JobManagerCommand.SUBMISSION, "Sub command");
    managerCommands.put(JobManagerCommand.SHOW_QUEUE, "show q command");
    manager.setJobManagerCommands(managerCommands);
    return manager;
}
/** Creates a Kerberos-secured UNICORE job submission fixture. */
private UnicoreJobSubmission prepareUnicoreJobSubmission() {
    UnicoreJobSubmission submission = new UnicoreJobSubmission();
    submission.setUnicoreEndPointURL("http://endpoint");
    submission.setSecurityProtocol(SecurityProtocol.KERBEROS);
    return submission;
}
/** Creates an EC2 cloud job submission fixture secured with Kerberos. */
private CloudJobSubmission prepareCloudJobSubmission() {
    CloudJobSubmission submission = new CloudJobSubmission();
    submission.setProviderName(ProviderName.EC2);
    submission.setNodeId("ec2 node");
    submission.setExecutableType("Executable");
    submission.setUserAccountName("user1");
    submission.setSecurityProtocol(SecurityProtocol.KERBEROS);
    return submission;
}
/** Wraps the given job manager in a Kerberos-secured LOCAL submission fixture. */
private LOCALSubmission prepareLocalJobSubmission(ResourceJobManager jobManager) {
    LOCALSubmission submission = new LOCALSubmission();
    submission.setSecurityProtocol(SecurityProtocol.KERBEROS);
    submission.setResourceJobManager(jobManager);
    return submission;
}
/**
 * Creates a GSI-secured SSH job submission fixture on the standard SSH port,
 * polling the job manager for status.
 */
private SSHJobSubmission prepareSSHJobSubmission(ResourceJobManager jobManager) {
    SSHJobSubmission submission = new SSHJobSubmission();
    submission.setResourceJobManager(jobManager);
    submission.setMonitorMode(MonitorMode.POLL_JOB_MANAGER);
    submission.setSecurityProtocol(SecurityProtocol.GSI);
    submission.setSshPort(22);
    return submission;
}
/** Creates a LOCAL data movement fixture; it carries no configurable state. */
private LOCALDataMovement prepareLocalDataMovement() {
    LOCALDataMovement movement = new LOCALDataMovement();
    return movement;
}
/** Creates an SCP data movement fixture using SSH keys on the standard port. */
private SCPDataMovement prepareScpDataMovement() {
    SCPDataMovement movement = new SCPDataMovement();
    movement.setSecurityProtocol(SecurityProtocol.SSH_KEYS);
    movement.setSshPort(22);
    return movement;
}
/** Creates a Kerberos-secured UNICORE data movement fixture. */
private UnicoreDataMovement prepareUnicoreDataMovement() {
    UnicoreDataMovement movement = new UnicoreDataMovement();
    movement.setUnicoreEndPointURL("http://endpoint");
    movement.setSecurityProtocol(SecurityProtocol.KERBEROS);
    return movement;
}
/**
 * Creates a GridFTP data movement fixture using SSH keys.
 *
 * Bug fix: the previous implementation called {@code endPoints.addAll(endPoints)} —
 * the freshly created empty list added to itself — so the supplied endpoint URLs
 * were silently dropped and every fixture carried an empty endpoint list. The
 * varargs are now copied into the list.
 *
 * @param endpoints GridFTP endpoint URLs to attach to the movement
 */
private GridFTPDataMovement prepareGridFTPDataMovement(String... endpoints) {
    GridFTPDataMovement dataMovement = new GridFTPDataMovement();
    dataMovement.setSecurityProtocol(SecurityProtocol.SSH_KEYS);
    List<String> endPoints = new ArrayList<String>();
    for (String endpoint : endpoints) {
        endPoints.add(endpoint);
    }
    dataMovement.setGridFTPEndPoints(endPoints);
    return dataMovement;
}
/**
 * Field-by-field comparison of two {@code ComputeResourceDescription} instances.
 * Scalar fields are compared reflectively; collection-valued fields are compared
 * element-wise ignoring element order (see {@link #deepCompareArrayList}).
 *
 * Fix: {@code fileSystems} was excluded from the reflective comparison but never
 * re-checked afterwards, so differing file-system maps went undetected. It is a
 * Map (not a List, so deepCompareArrayList does not apply) and is now compared
 * explicitly with equals().
 *
 * @return true when every compared field of the two descriptions matches
 */
private boolean deepCompareComputeResourceDescription(ComputeResourceDescription expected, ComputeResourceDescription actual) {
    boolean equals = EqualsBuilder.reflectionEquals(expected, actual,
            "__isset_bitfield", "batchQueues", "fileSystems", "jobSubmissionInterfaces", "dataMovementInterfaces", "ipAddresses", "hostAliases");
    // fileSystems is a Map<FileSystems, String>; compare it directly.
    equals = equals & (expected.getFileSystems() == null
            ? actual.getFileSystems() == null
            : expected.getFileSystems().equals(actual.getFileSystems()));
    equals = equals & deepCompareArrayList(expected.getBatchQueues(), actual.getBatchQueues(), false);
    equals = equals & deepCompareArrayList(expected.getJobSubmissionInterfaces(), actual.getJobSubmissionInterfaces(), false);
    equals = equals & deepCompareArrayList(expected.getDataMovementInterfaces(), actual.getDataMovementInterfaces(), false);
    equals = equals & deepCompareArrayList(expected.getIpAddresses(), actual.getIpAddresses(), false);
    equals = equals & deepCompareArrayList(expected.getHostAliases(), actual.getHostAliases(), false);
    return equals;
}
/**
 * Deep-compares two lists element-wise using reflective equality (Thrift's
 * {@code __isset_bitfield} excluded). With {@code preferOrder} the lists must
 * match position by position; otherwise a greedy one-to-one matching is used,
 * so element order is irrelevant (duplicates are consumed at most once).
 *
 * Fix: the unordered inner loop previously iterated up to {@code expected.size()}
 * when scanning {@code actual} — harmless only because the sizes were pre-checked
 * equal; it now bounds on {@code actual.size()}. The tangled boolean-flag logic
 * is replaced by early returns with identical outcomes.
 *
 * @param expected    expected elements (may be null)
 * @param actual      actual elements (may be null)
 * @param preferOrder true to require identical ordering
 * @return true when both lists are null, or both are non-null with pairwise-equal
 *         elements under the chosen ordering rule
 */
private boolean deepCompareArrayList(List expected, List actual, boolean preferOrder) {
    if (expected == null || actual == null) {
        // Both null -> equal; exactly one null -> not equal.
        return expected == actual;
    }
    if (expected.size() != actual.size()) {
        return false;
    }
    if (preferOrder) {
        for (int i = 0; i < expected.size(); i++) {
            if (!EqualsBuilder.reflectionEquals(expected.get(i), actual.get(i), "__isset_bitfield")) {
                return false;
            }
        }
        return true;
    }
    // Unordered: greedily pair each expected element with an unmatched actual element.
    boolean[] matched = new boolean[actual.size()];
    for (Object expectedElement : expected) {
        boolean found = false;
        for (int j = 0; j < actual.size(); j++) {
            if (matched[j]) {
                continue;
            }
            if (EqualsBuilder.reflectionEquals(expectedElement, actual.get(j), "__isset_bitfield")) {
                matched[j] = true;
                found = true;
                break;
            }
        }
        if (!found) {
            return false;
        }
    }
    return true;
}
}
| 816 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GroupResourceProfileRepositoryTest.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.model.appcatalog.computeresource.BatchQueue;
import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
import org.apache.airavata.model.appcatalog.groupresourceprofile.*;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for {@code GroupResourceProfileRepository}: CRUD of group
 * resource profiles together with their compute preferences, compute-resource
 * policies, batch-queue policies, and compute-resource reservations (including
 * reservation add/update/remove and queue-name membership changes).
 *
 * Fix: the logger was bound to {@code ComputeResourceRepository.class}
 * (copy-paste defect), so log lines were attributed to the wrong class; it is
 * now bound to this test class.
 */
public class GroupResourceProfileRepositoryTest extends TestBase {

    private static final Logger logger = LoggerFactory.getLogger(GroupResourceProfileRepositoryTest.class);

    private ComputeResourceRepository computeResourceRepository;
    private GroupResourceProfileRepository groupResourceProfileRepository;

    private String gatewayId = "TEST_GATEWAY";
    private String groupResourceProfileId = null;
    // Ids of the two compute resources registered in setUp().
    private String resourceId1 = null;
    private String resourceId2 = null;

    private final String QUEUE1_NAME = "queue1";
    private final String QUEUE2_NAME = "queue2";

    public GroupResourceProfileRepositoryTest() {
        super(Database.APP_CATALOG);
        computeResourceRepository = new ComputeResourceRepository();
        groupResourceProfileRepository = new GroupResourceProfileRepository();
    }

    /**
     * Registers two compute resources used as reference targets by the tests:
     * "localhost" with queues queue1/queue2 and "localhost2" with cmqueue1/cmqueue2.
     */
    @Override
    public void setUp() throws Exception {
        super.setUp();
        ComputeResourceDescription description = new ComputeResourceDescription();
        description.setHostName("localhost");
        description.setResourceDescription("test compute resource");
        List<String> ipdaresses = new ArrayList<String>();
        ipdaresses.add("222.33.43.444");
        ipdaresses.add("23.344.44.454");
        description.setIpAddresses(ipdaresses);
        BatchQueue batchQueue1 = new BatchQueue();
        batchQueue1.setQueueName(QUEUE1_NAME);
        batchQueue1.setQueueDescription("que1Desc1");
        batchQueue1.setMaxRunTime(10);
        batchQueue1.setMaxNodes(4);
        batchQueue1.setMaxJobsInQueue(1);
        BatchQueue batchQueue2 = new BatchQueue();
        batchQueue2.setQueueName(QUEUE2_NAME);
        batchQueue2.setQueueDescription("que1Desc2");
        batchQueue2.setMaxRunTime(10);
        batchQueue2.setMaxNodes(4);
        batchQueue2.setMaxJobsInQueue(1);
        List<BatchQueue> batchQueueList = new ArrayList<BatchQueue>();
        batchQueueList.add(batchQueue1);
        batchQueueList.add(batchQueue2);
        description.setBatchQueues(batchQueueList);
        this.resourceId1 = computeResourceRepository.addComputeResource(description);

        // Second compute resource with its own pair of batch queues.
        ComputeResourceDescription cm2 = new ComputeResourceDescription();
        cm2.setHostName("localhost2");
        cm2.setResourceDescription("test compute host");
        BatchQueue cm_batchQueue1 = new BatchQueue();
        cm_batchQueue1.setQueueName("cmqueue1");
        cm_batchQueue1.setQueueDescription("cmque1Desc1");
        cm_batchQueue1.setMaxRunTime(10);
        cm_batchQueue1.setMaxNodes(4);
        cm_batchQueue1.setMaxJobsInQueue(1);
        BatchQueue cm_batchQueue2 = new BatchQueue();
        cm_batchQueue2.setQueueName("cmqueue2");
        cm_batchQueue2.setQueueDescription("cmque1Desc2");
        cm_batchQueue2.setMaxRunTime(10);
        cm_batchQueue2.setMaxNodes(4);
        cm_batchQueue2.setMaxJobsInQueue(1);
        List<BatchQueue> cmbatchQueueList = new ArrayList<BatchQueue>();
        cmbatchQueueList.add(cm_batchQueue1);
        cmbatchQueueList.add(cm_batchQueue2);
        cm2.setBatchQueues(cmbatchQueueList);
        this.resourceId2 = computeResourceRepository.addComputeResource(cm2);
    }

    /**
     * End-to-end CRUD scenario: creates a profile with preferences, policies and
     * reservations for both compute resources, verifies retrieval of each piece,
     * exercises nulling of a resource-specific credential token (AIRAVATA-2872),
     * verifies orphan removal of compute preferences, then deletes the profile.
     */
    @Test
    public void GroupResourceProfileRepositoryTest() throws AppCatalogException {
        GroupResourceProfile groupResourceProfile = new GroupResourceProfile();
        groupResourceProfile.setGatewayId(gatewayId);
        groupResourceProfile.setGroupResourceProfileName("TEST_GROUP_PROFILE_NAME");
        groupResourceProfile.setDefaultCredentialStoreToken("test-cred-store-token");
        GroupAccountSSHProvisionerConfig groupAccountSSHProvisionerConfig = new GroupAccountSSHProvisionerConfig();
        groupAccountSSHProvisionerConfig.setResourceId(resourceId1);
        groupAccountSSHProvisionerConfig.setConfigName("configName");
        groupAccountSSHProvisionerConfig.setConfigValue("configvalue");
        // Two reservations on resource 1; reservation1 starts earlier.
        ComputeResourceReservation reservation1 = new ComputeResourceReservation();
        reservation1.setReservationName("test-reservation1");
        reservation1.setStartTime(AiravataUtils.getCurrentTimestamp().getTime());
        reservation1.setEndTime(AiravataUtils.getCurrentTimestamp().getTime() + 100000);
        reservation1.addToQueueNames(QUEUE1_NAME);
        reservation1.addToQueueNames(QUEUE2_NAME);
        ComputeResourceReservation reservation2 = new ComputeResourceReservation();
        reservation2.setReservationName("test-reservation2");
        reservation2.setStartTime(AiravataUtils.getCurrentTimestamp().getTime() + 200000);
        reservation2.setEndTime(AiravataUtils.getCurrentTimestamp().getTime() + 300000);
        reservation2.addToQueueNames(QUEUE1_NAME);
        GroupComputeResourcePreference groupComputeResourcePreference1 = new GroupComputeResourcePreference();
        groupComputeResourcePreference1.setComputeResourceId(resourceId1);
        groupComputeResourcePreference1.addToGroupSSHAccountProvisionerConfigs(groupAccountSSHProvisionerConfig);
        groupComputeResourcePreference1.addToReservations(reservation1);
        groupComputeResourcePreference1.addToReservations(reservation2);
        GroupComputeResourcePreference groupComputeResourcePreference2 = new GroupComputeResourcePreference();
        groupComputeResourcePreference2.setComputeResourceId(resourceId2);
        List<GroupComputeResourcePreference> groupComputeResourcePreferenceList = new ArrayList<>();
        groupComputeResourcePreferenceList.add(groupComputeResourcePreference1);
        groupComputeResourcePreferenceList.add(groupComputeResourcePreference2);
        groupResourceProfile.setComputePreferences(groupComputeResourcePreferenceList);
        // One compute-resource policy per resource.
        ComputeResourcePolicy computeResourcePolicy = new ComputeResourcePolicy();
        computeResourcePolicy.setComputeResourceId(resourceId1);
        computeResourcePolicy.addToAllowedBatchQueues("queue1");
        ComputeResourcePolicy computeResourcePolicy2 = new ComputeResourcePolicy();
        computeResourcePolicy2.setComputeResourceId(resourceId2);
        computeResourcePolicy2.addToAllowedBatchQueues("cmqueue1");
        List<ComputeResourcePolicy> computeResourcePolicyList = new ArrayList<>();
        computeResourcePolicyList.add(computeResourcePolicy);
        computeResourcePolicyList.add(computeResourcePolicy2);
        groupResourceProfile.setComputeResourcePolicies(computeResourcePolicyList);
        // One batch-queue policy per resource.
        BatchQueueResourcePolicy batchQueueResourcePolicy = new BatchQueueResourcePolicy();
        batchQueueResourcePolicy.setComputeResourceId(resourceId1);
        batchQueueResourcePolicy.setQueuename("queue1");
        batchQueueResourcePolicy.setMaxAllowedCores(2);
        batchQueueResourcePolicy.setMaxAllowedWalltime(10);
        BatchQueueResourcePolicy batchQueueResourcePolicy2 = new BatchQueueResourcePolicy();
        batchQueueResourcePolicy2.setComputeResourceId(resourceId2);
        batchQueueResourcePolicy2.setQueuename("cmqueue1");
        batchQueueResourcePolicy2.setMaxAllowedCores(3);
        batchQueueResourcePolicy2.setMaxAllowedWalltime(12);
        List<BatchQueueResourcePolicy> batchQueueResourcePolicyList = new ArrayList<>();
        batchQueueResourcePolicyList.add(batchQueueResourcePolicy);
        batchQueueResourcePolicyList.add(batchQueueResourcePolicy2);
        groupResourceProfile.setBatchQueueResourcePolicies(batchQueueResourcePolicyList);
        groupResourceProfileId = groupResourceProfileRepository.addGroupResourceProfile(groupResourceProfile);
        String computeResourcePolicyId1 = null;
        String batchQueueResourcePolicyId2 = null;
        if (groupResourceProfileRepository.isGroupResourceProfileExists(groupResourceProfileId)) {
            GroupResourceProfile getGroupResourceProfile = groupResourceProfileRepository.getGroupResourceProfile(groupResourceProfileId);
            assertTrue(getGroupResourceProfile.getGatewayId().equals(gatewayId));
            assertTrue(getGroupResourceProfile.getGroupResourceProfileId().equals(groupResourceProfileId));
            assertEquals("test-cred-store-token", getGroupResourceProfile.getDefaultCredentialStoreToken());
            assertTrue(getGroupResourceProfile.getComputePreferences().size() == 2);
            assertTrue(getGroupResourceProfile.getComputeResourcePolicies().size() == 2);
            assertTrue(getGroupResourceProfile.getBatchQueueResourcePolicies().size() == 2);
            // Policy ids are generated on save; pick them out per resource for later lookups.
            computeResourcePolicyId1 = getGroupResourceProfile.getComputeResourcePolicies()
                    .stream()
                    .filter(crp -> crp.getComputeResourceId().equals(resourceId1))
                    .map(crp -> crp.getResourcePolicyId())
                    .findFirst()
                    .get();
            batchQueueResourcePolicyId2 = getGroupResourceProfile.getBatchQueueResourcePolicies()
                    .stream()
                    .filter(bqrp -> bqrp.getComputeResourceId().equals(resourceId2))
                    .map(bqrp -> bqrp.getResourcePolicyId())
                    .findFirst()
                    .get();
        }
        assertTrue(groupResourceProfileRepository.getGroupComputeResourcePreference(resourceId1,groupResourceProfileId) != null);
        assertTrue(groupResourceProfileRepository.getGroupComputeResourcePreference(resourceId1,groupResourceProfileId).getGroupSSHAccountProvisionerConfigs().size() == 1);
        // verify reservation1
        assertEquals(2, groupResourceProfileRepository.getGroupComputeResourcePreference(resourceId1,groupResourceProfileId).getReservations().size());
        ComputeResourceReservation retrievedReservation1 = groupResourceProfileRepository.getGroupComputeResourcePreference(resourceId1, groupResourceProfileId).getReservations().get(0);
        assertEquals(reservation1.getReservationName(), retrievedReservation1.getReservationName());
        assertEquals(reservation1.getStartTime(), retrievedReservation1.getStartTime());
        assertEquals(reservation1.getEndTime(), retrievedReservation1.getEndTime());
        ComputeResourcePolicy getComputeResourcePolicy = groupResourceProfileRepository.getComputeResourcePolicy(computeResourcePolicyId1);
        assertTrue(getComputeResourcePolicy.getAllowedBatchQueues().get(0).equals("queue1"));
        BatchQueueResourcePolicy getBatchQueuePolicy = groupResourceProfileRepository.getBatchQueueResourcePolicy(batchQueueResourcePolicyId2);
        assertTrue(getBatchQueuePolicy != null);
        assertTrue(getBatchQueuePolicy.getMaxAllowedCores() == 3);
        assertTrue(getBatchQueuePolicy.getMaxAllowedWalltime() == 12);
        // With no accessible profile ids supplied, nothing is visible.
        assertTrue(groupResourceProfileRepository.getAllGroupResourceProfiles(gatewayId, null).size() == 0);
        assertTrue(groupResourceProfileRepository.getAllGroupResourceProfiles(gatewayId, Collections.emptyList()).size() == 0);
        assertTrue(groupResourceProfileRepository.getAllGroupComputeResourcePreferences(groupResourceProfileId).size() == 2);
        assertTrue(groupResourceProfileRepository.getAllGroupComputeResourcePolicies(groupResourceProfileId).size() == 2);
        assertTrue(groupResourceProfileRepository.getAllGroupBatchQueueResourcePolicies(groupResourceProfileId).size() == 2);
        // AIRAVATA-2872 Test setting resourceSpecificCredentialStoreToken to a value and then changing it to null
        GroupResourceProfile retrievedGroupResourceProfile = groupResourceProfileRepository.getGroupResourceProfile(groupResourceProfileId);
        GroupComputeResourcePreference retrievedGroupComputeResourcePreference = retrievedGroupResourceProfile.getComputePreferences().stream()
                .filter(pref -> pref.getComputeResourceId().equals(resourceId1))
                .findFirst()
                .get();
        assertNull(retrievedGroupComputeResourcePreference.getResourceSpecificCredentialStoreToken());
        retrievedGroupComputeResourcePreference.setResourceSpecificCredentialStoreToken("abc123");
        groupResourceProfileRepository.updateGroupResourceProfile(retrievedGroupResourceProfile);
        GroupResourceProfile retrievedGroupResourceProfile2 = groupResourceProfileRepository.getGroupResourceProfile(groupResourceProfileId);
        GroupComputeResourcePreference retrievedGroupComputeResourcePreference2 = retrievedGroupResourceProfile2.getComputePreferences().stream()
                .filter(pref -> pref.getComputeResourceId().equals(resourceId1))
                .findFirst()
                .get();
        assertEquals("abc123", retrievedGroupComputeResourcePreference2.getResourceSpecificCredentialStoreToken());
        retrievedGroupComputeResourcePreference2.setResourceSpecificCredentialStoreToken(null);
        assertNull(retrievedGroupComputeResourcePreference2.getResourceSpecificCredentialStoreToken());
        groupResourceProfileRepository.updateGroupResourceProfile(retrievedGroupResourceProfile2);
        GroupResourceProfile retrievedGroupResourceProfile3 = groupResourceProfileRepository.getGroupResourceProfile(groupResourceProfileId);
        GroupComputeResourcePreference retrievedGroupComputeResourcePreference3 = retrievedGroupResourceProfile3.getComputePreferences().stream()
                .filter(pref -> pref.getComputeResourceId().equals(resourceId1))
                .findFirst()
                .get();
        assertNull(retrievedGroupComputeResourcePreference3.getResourceSpecificCredentialStoreToken());
        // Orphan removal test: dropping a preference from the list removes it on update.
        assertEquals(2, retrievedGroupResourceProfile3.getComputePreferencesSize());
        retrievedGroupResourceProfile3.setComputePreferences(retrievedGroupResourceProfile3.getComputePreferences().subList(0, 1));
        groupResourceProfileRepository.updateGroupResourceProfile(retrievedGroupResourceProfile3);
        GroupResourceProfile retrievedGroupResourceProfile4 = groupResourceProfileRepository.getGroupResourceProfile(groupResourceProfileId);
        assertEquals(1, retrievedGroupResourceProfile4.getComputePreferencesSize());
        groupResourceProfileRepository.removeGroupResourceProfile(groupResourceProfileId);
    }

    /**
     * Updating a profile via a copy that lacks server-populated fields (e.g.
     * creationTime) must not clobber those fields.
     */
    @Test
    public void testUpdatingGroupResourceProfileWithoutCreationTime() throws AppCatalogException {
        GroupResourceProfile groupResourceProfile = new GroupResourceProfile();
        groupResourceProfile.setGatewayId(gatewayId);
        groupResourceProfile.setGroupResourceProfileName("TEST_GROUP_PROFILE_NAME");
        groupResourceProfile.setDefaultCredentialStoreToken("test-cred-store-token");
        // Simulate what is like for a client that only gets back the id from
        // the create operation but not any fields, like creation time, that are
        // populated by the create operation
        GroupResourceProfile cloneGroupResourceProfile = groupResourceProfile.deepCopy();
        String groupResourceProfileId = groupResourceProfileRepository.addGroupResourceProfile(groupResourceProfile);
        long creationTime = groupResourceProfileRepository.getGroupResourceProfile(groupResourceProfileId).getCreationTime();
        cloneGroupResourceProfile.setGroupResourceProfileId(groupResourceProfileId);
        groupResourceProfileRepository.updateGroupResourceProfile(cloneGroupResourceProfile);
        long creationTimeAfterUpdate = groupResourceProfileRepository.getGroupResourceProfile(groupResourceProfileId).getCreationTime();
        Assert.assertEquals("creationTime should be the same after update", creationTime, creationTimeAfterUpdate);
    }

    /**
     * Removing a reservation from a compute preference and updating the profile
     * must delete it; the remaining reservation survives intact.
     */
    @Test
    public void testRemovingReservation() throws AppCatalogException {
        GroupResourceProfile groupResourceProfile = new GroupResourceProfile();
        groupResourceProfile.setGatewayId(gatewayId);
        groupResourceProfile.setGroupResourceProfileName("TEST_GROUP_PROFILE_NAME");
        ComputeResourceReservation reservation1 = new ComputeResourceReservation();
        reservation1.setReservationName("test-reservation1");
        reservation1.setStartTime(AiravataUtils.getCurrentTimestamp().getTime());
        reservation1.setEndTime(AiravataUtils.getCurrentTimestamp().getTime() + 100000);
        reservation1.addToQueueNames(QUEUE1_NAME);
        reservation1.addToQueueNames(QUEUE2_NAME);
        ComputeResourceReservation reservation2 = new ComputeResourceReservation();
        reservation2.setReservationName("test-reservation2");
        reservation2.setStartTime(AiravataUtils.getCurrentTimestamp().getTime() + 200000);
        reservation2.setEndTime(AiravataUtils.getCurrentTimestamp().getTime() + 300000);
        reservation2.addToQueueNames(QUEUE1_NAME);
        GroupComputeResourcePreference groupComputeResourcePreference1 = new GroupComputeResourcePreference();
        groupComputeResourcePreference1.setComputeResourceId(resourceId1);
        groupComputeResourcePreference1.addToReservations(reservation1);
        groupComputeResourcePreference1.addToReservations(reservation2);
        groupResourceProfile.addToComputePreferences(groupComputeResourcePreference1);
        String groupResourceProfileId = groupResourceProfileRepository.addGroupResourceProfile(groupResourceProfile);
        // Remove one of the reservations
        {
            GroupResourceProfile retrievedGroupResourceProfile = groupResourceProfileRepository
                    .getGroupResourceProfile(groupResourceProfileId);
            List<ComputeResourceReservation> retrievedReservations = retrievedGroupResourceProfile
                    .getComputePreferences().get(0).getReservations();
            assertEquals(2, retrievedReservations.size());
            retrievedReservations.remove(1);
            groupResourceProfileRepository.updateGroupResourceProfile(retrievedGroupResourceProfile);
        }
        {
            GroupResourceProfile retrievedGroupResourceProfile = groupResourceProfileRepository
                    .getGroupResourceProfile(groupResourceProfileId);
            List<ComputeResourceReservation> retrievedReservations = retrievedGroupResourceProfile
                    .getComputePreferences().get(0).getReservations();
            assertEquals(1, retrievedReservations.size());
            assertEquals(reservation1.getReservationName(), retrievedReservations.get(0).getReservationName());
        }
    }

    /**
     * Updating a reservation's time window must persist, and reservations must
     * come back ordered by start time (the updated one sorts last here).
     */
    @Test
    public void testUpdatingReservation() throws AppCatalogException {
        GroupResourceProfile groupResourceProfile = new GroupResourceProfile();
        groupResourceProfile.setGatewayId(gatewayId);
        groupResourceProfile.setGroupResourceProfileName("TEST_GROUP_PROFILE_NAME");
        ComputeResourceReservation reservation1 = new ComputeResourceReservation();
        reservation1.setReservationName("test-reservation1");
        reservation1.setStartTime(AiravataUtils.getCurrentTimestamp().getTime());
        reservation1.setEndTime(AiravataUtils.getCurrentTimestamp().getTime() + 100000);
        reservation1.addToQueueNames(QUEUE1_NAME);
        reservation1.addToQueueNames(QUEUE2_NAME);
        ComputeResourceReservation reservation2 = new ComputeResourceReservation();
        reservation2.setReservationName("test-reservation2");
        reservation2.setStartTime(AiravataUtils.getCurrentTimestamp().getTime() + 200000);
        reservation2.setEndTime(AiravataUtils.getCurrentTimestamp().getTime() + 300000);
        reservation2.addToQueueNames(QUEUE1_NAME);
        GroupComputeResourcePreference groupComputeResourcePreference1 = new GroupComputeResourcePreference();
        groupComputeResourcePreference1.setComputeResourceId(resourceId1);
        groupComputeResourcePreference1.addToReservations(reservation1);
        groupComputeResourcePreference1.addToReservations(reservation2);
        groupResourceProfile.addToComputePreferences(groupComputeResourcePreference1);
        String groupResourceProfileId = groupResourceProfileRepository.addGroupResourceProfile(groupResourceProfile);
        // Update one of the reservations
        long newStartTime = AiravataUtils.getCurrentTimestamp().getTime() + 1000*1000;
        long newEndTime = AiravataUtils.getCurrentTimestamp().getTime() + 2*1000*1000;
        {
            GroupResourceProfile retrievedGroupResourceProfile = groupResourceProfileRepository
                    .getGroupResourceProfile(groupResourceProfileId);
            List<ComputeResourceReservation> retrievedReservations = retrievedGroupResourceProfile
                    .getComputePreferences().get(0).getReservations();
            assertEquals(2, retrievedReservations.size());
            // push into future, should sort second on next retrieval
            retrievedReservations.get(0).setStartTime(newStartTime);
            retrievedReservations.get(0).setEndTime(newEndTime);
            groupResourceProfileRepository.updateGroupResourceProfile(retrievedGroupResourceProfile);
        }
        {
            GroupResourceProfile retrievedGroupResourceProfile = groupResourceProfileRepository
                    .getGroupResourceProfile(groupResourceProfileId);
            List<ComputeResourceReservation> retrievedReservations = retrievedGroupResourceProfile
                    .getComputePreferences().get(0).getReservations();
            assertEquals(2, retrievedReservations.size());
            // first reservation should now sort second
            ComputeResourceReservation reservation = retrievedReservations.get(1);
            assertEquals(reservation1.getReservationName(), reservation.getReservationName());
            assertEquals(newStartTime, reservation.getStartTime());
            assertEquals(newEndTime, reservation.getEndTime());
        }
    }

    /**
     * Adding a queue name to an existing reservation must persist through an
     * update/retrieve cycle.
     */
    @Test
    public void testAddingQueueToReservation() throws AppCatalogException {
        GroupResourceProfile groupResourceProfile = new GroupResourceProfile();
        groupResourceProfile.setGatewayId(gatewayId);
        groupResourceProfile.setGroupResourceProfileName("TEST_GROUP_PROFILE_NAME");
        ComputeResourceReservation reservation1 = new ComputeResourceReservation();
        reservation1.setReservationName("test-reservation1");
        reservation1.setStartTime(AiravataUtils.getCurrentTimestamp().getTime());
        reservation1.setEndTime(AiravataUtils.getCurrentTimestamp().getTime() + 100000);
        reservation1.addToQueueNames(QUEUE1_NAME);
        GroupComputeResourcePreference groupComputeResourcePreference1 = new GroupComputeResourcePreference();
        groupComputeResourcePreference1.setComputeResourceId(resourceId1);
        groupComputeResourcePreference1.addToReservations(reservation1);
        groupResourceProfile.addToComputePreferences(groupComputeResourcePreference1);
        String groupResourceProfileId = groupResourceProfileRepository.addGroupResourceProfile(groupResourceProfile);
        // add queue to the reservation
        {
            GroupResourceProfile retrievedGroupResourceProfile = groupResourceProfileRepository
                    .getGroupResourceProfile(groupResourceProfileId);
            List<ComputeResourceReservation> retrievedReservations = retrievedGroupResourceProfile
                    .getComputePreferences().get(0).getReservations();
            assertEquals(1, retrievedReservations.size());
            ComputeResourceReservation reservation = retrievedReservations.get(0);
            assertEquals(1, reservation.getQueueNamesSize());
            reservation.addToQueueNames(QUEUE2_NAME);
            groupResourceProfileRepository.updateGroupResourceProfile(retrievedGroupResourceProfile);
        }
        {
            GroupResourceProfile retrievedGroupResourceProfile = groupResourceProfileRepository
                    .getGroupResourceProfile(groupResourceProfileId);
            List<ComputeResourceReservation> retrievedReservations = retrievedGroupResourceProfile
                    .getComputePreferences().get(0).getReservations();
            assertEquals(1, retrievedReservations.size());
            ComputeResourceReservation reservation = retrievedReservations.get(0);
            // Compare as sets: queue-name ordering is not part of the contract here.
            assertEquals(new HashSet<>(Arrays.asList(QUEUE1_NAME, QUEUE2_NAME)), new HashSet<>(reservation.getQueueNames()));
        }
    }

    /**
     * Replacing a reservation's queue-name set with a smaller one must remove the
     * dropped name on update.
     */
    @Test
    public void testRemovingQueueFromReservation() throws AppCatalogException {
        GroupResourceProfile groupResourceProfile = new GroupResourceProfile();
        groupResourceProfile.setGatewayId(gatewayId);
        groupResourceProfile.setGroupResourceProfileName("TEST_GROUP_PROFILE_NAME");
        ComputeResourceReservation reservation1 = new ComputeResourceReservation();
        reservation1.setReservationName("test-reservation1");
        reservation1.setStartTime(AiravataUtils.getCurrentTimestamp().getTime());
        reservation1.setEndTime(AiravataUtils.getCurrentTimestamp().getTime() + 100000);
        reservation1.addToQueueNames(QUEUE1_NAME);
        reservation1.addToQueueNames(QUEUE2_NAME);
        GroupComputeResourcePreference groupComputeResourcePreference1 = new GroupComputeResourcePreference();
        groupComputeResourcePreference1.setComputeResourceId(resourceId1);
        groupComputeResourcePreference1.addToReservations(reservation1);
        groupResourceProfile.addToComputePreferences(groupComputeResourcePreference1);
        String groupResourceProfileId = groupResourceProfileRepository.addGroupResourceProfile(groupResourceProfile);
        // remove a queue from the reservation by resetting the queue-name set
        {
            GroupResourceProfile retrievedGroupResourceProfile = groupResourceProfileRepository
                    .getGroupResourceProfile(groupResourceProfileId);
            List<ComputeResourceReservation> retrievedReservations = retrievedGroupResourceProfile
                    .getComputePreferences().get(0).getReservations();
            assertEquals(1, retrievedReservations.size());
            ComputeResourceReservation reservation = retrievedReservations.get(0);
            assertEquals(new HashSet<>(Arrays.asList(QUEUE1_NAME, QUEUE2_NAME)), new HashSet<>(reservation.getQueueNames()));
            reservation.unsetQueueNames();
            reservation.addToQueueNames(QUEUE1_NAME);
            groupResourceProfileRepository.updateGroupResourceProfile(retrievedGroupResourceProfile);
        }
        {
            GroupResourceProfile retrievedGroupResourceProfile = groupResourceProfileRepository
                    .getGroupResourceProfile(groupResourceProfileId);
            List<ComputeResourceReservation> retrievedReservations = retrievedGroupResourceProfile
                    .getComputePreferences().get(0).getReservations();
            assertEquals(1, retrievedReservations.size());
            ComputeResourceReservation reservation = retrievedReservations.get(0);
            assertEquals(new HashSet<>(Arrays.asList(QUEUE1_NAME)), new HashSet<>(reservation.getQueueNames()));
        }
    }
}
| 817 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationDeploymentRepositoryTest.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationDeploymentDescription;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
import org.apache.airavata.model.appcatalog.appdeployment.CommandObject;
import org.apache.airavata.model.appcatalog.appdeployment.SetEnvPaths;
import org.apache.airavata.model.appcatalog.appinterface.application_interface_modelConstants;
import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
import org.apache.airavata.model.parallelism.ApplicationParallelismType;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import static org.junit.Assert.assertTrue;
public class ApplicationDeploymentRepositoryTest extends TestBase {
private static final Logger logger = LoggerFactory.getLogger(ApplicationDeploymentRepositoryTest.class);
private ComputeResourceRepository computeResourceRepository;
private ApplicationInterfaceRepository applicationInterfaceRepository;
private ApplicationDeploymentRepository applicationDeploymentRepository;
private String gatewayId = "testGateway";
public ApplicationDeploymentRepositoryTest() {
super(Database.APP_CATALOG);
computeResourceRepository = new ComputeResourceRepository();
applicationInterfaceRepository = new ApplicationInterfaceRepository();
applicationDeploymentRepository = new ApplicationDeploymentRepository();
}
private String addSampleApplicationModule(String tag) throws AppCatalogException {
ApplicationModule applicationModule = new ApplicationModule();
applicationModule.setAppModuleId("appMod" + tag);
applicationModule.setAppModuleName("appModName" + tag);
return applicationInterfaceRepository.addApplicationModule(applicationModule, gatewayId);
}
private String addSampleComputeResource(String tag) throws AppCatalogException {
ComputeResourceDescription computeResourceDescription = new ComputeResourceDescription();
computeResourceDescription.setComputeResourceId("compHost" + tag);
computeResourceDescription.setHostName("compHostName" + tag);
return computeResourceRepository.addComputeResource(computeResourceDescription);
}
private boolean deepCompareDeployment(ApplicationDeploymentDescription expected, ApplicationDeploymentDescription actual) {
boolean equals = true;
equals = equals && EqualsBuilder.reflectionEquals(expected, actual,
"moduleLoadCmds", "libPrependPaths", "libAppendPaths" ,"setEnvironment" ,"preJobCommands"
,"postJobCommands", "__isset_bitfield");
equals = equals && deepCompareLists(expected.getSetEnvironment(), actual.getSetEnvironment(), Comparator.comparingInt(SetEnvPaths::getEnvPathOrder));
equals = equals && deepCompareLists(expected.getLibPrependPaths(), actual.getLibPrependPaths(), Comparator.comparingInt(SetEnvPaths::getEnvPathOrder));
equals = equals && deepCompareLists(expected.getLibAppendPaths(), actual.getLibAppendPaths(), Comparator.comparingInt(SetEnvPaths::getEnvPathOrder));
equals = equals && deepCompareLists(expected.getModuleLoadCmds(), actual.getModuleLoadCmds(), Comparator.comparingInt(CommandObject::getCommandOrder));
equals = equals && deepCompareLists(expected.getPreJobCommands(), actual.getPreJobCommands(), Comparator.comparingInt(CommandObject::getCommandOrder));
equals = equals && deepCompareLists(expected.getPostJobCommands(), actual.getPostJobCommands(), Comparator.comparingInt(CommandObject::getCommandOrder));
return equals;
}
private <T> boolean deepCompareLists(List<T> expected, List<T> actual, Comparator<? super T> c) {
List<T> expectedCopy = new ArrayList<>(expected);
expectedCopy.sort(c);
List<T> actualCopy = new ArrayList<>(actual);
actualCopy.sort(c);
return EqualsBuilder.reflectionEquals(expectedCopy, actualCopy);
}
private ApplicationDeploymentDescription prepareSampleDeployment(String tag, String applicationModule, String computeResource) {
CommandObject moduleLoadCmd = new CommandObject();
moduleLoadCmd.setCommand("moduleLoadCmd");
moduleLoadCmd.setCommandOrder(1);
SetEnvPaths libPrependPath = new SetEnvPaths();
libPrependPath.setName("libPrependPath");
libPrependPath.setValue("libPrependPathValue");
libPrependPath.setEnvPathOrder(1);
SetEnvPaths libAppendPath = new SetEnvPaths();
libAppendPath.setName("libAppendPath");
libAppendPath.setValue("libAppendPathValue");
libAppendPath.setEnvPathOrder(2);
SetEnvPaths setEnvironment = new SetEnvPaths();
setEnvironment.setName("setEnvironment");
setEnvironment.setValue("setEnvironmentValue");
setEnvironment.setEnvPathOrder(3);
CommandObject preJobCommand = new CommandObject();
preJobCommand.setCommand("preCommand");
preJobCommand.setCommandOrder(2);
CommandObject postJobCommand = new CommandObject();
postJobCommand.setCommand("postCommand");
postJobCommand.setCommandOrder(3);
ApplicationDeploymentDescription deployment = new ApplicationDeploymentDescription();
deployment.setAppDeploymentId("appDep" + tag);
deployment.setAppDeploymentDescription("test application deployment" + tag);
deployment.setAppModuleId(applicationModule);
deployment.setComputeHostId(computeResource);
deployment.setExecutablePath("executablePath" + tag);
deployment.setParallelism(ApplicationParallelismType.SERIAL);
deployment.setModuleLoadCmds(new ArrayList<>(Arrays.asList(moduleLoadCmd)));
deployment.setLibPrependPaths(new ArrayList<>(Arrays.asList(libPrependPath)));
deployment.setLibAppendPaths(new ArrayList<>(Arrays.asList(libAppendPath)));
deployment.setPreJobCommands(new ArrayList<>(Arrays.asList(preJobCommand)));
deployment.setPostJobCommands(new ArrayList<>(Arrays.asList(postJobCommand)));
deployment.setSetEnvironment(new ArrayList<>(Arrays.asList(setEnvironment)));
deployment.setDefaultQueueName("queue" + tag);
deployment.setDefaultCPUCount(10);
deployment.setDefaultNodeCount(5);
deployment.setDefaultWalltime(15);
deployment.setEditableByUser(true);
return deployment;
}
@Test
public void createAppDeploymentTest() throws AppCatalogException {
Assert.assertNull(applicationDeploymentRepository.getApplicationDeployement("appDep1"));
String applicationModule = addSampleApplicationModule("1");
String computeResource = addSampleComputeResource("1");
ApplicationDeploymentDescription deployment = prepareSampleDeployment("1", applicationModule, computeResource);
String deploymentId = applicationDeploymentRepository.addApplicationDeployment(deployment, gatewayId);
ApplicationDeploymentDescription savedDeployment = applicationDeploymentRepository.getApplicationDeployement("appDep1");
Assert.assertNotNull(savedDeployment);
Assert.assertTrue(deepCompareDeployment(deployment, savedDeployment));
}
@Test
public void createAppDeploymentWithDefaultIdTest() throws AppCatalogException {
String applicationModule = addSampleApplicationModule("1");
String computeResource = addSampleComputeResource("1");
ApplicationDeploymentDescription deployment = prepareSampleDeployment("1", applicationModule, computeResource);
deployment.setAppDeploymentId(application_interface_modelConstants.DEFAULT_ID);
String deploymentId = applicationDeploymentRepository.addApplicationDeployment(deployment, gatewayId);
Assert.assertNotEquals(deploymentId, application_interface_modelConstants.DEFAULT_ID);
Assert.assertEquals("compHostName1" + "_" + applicationModule, deploymentId);
}
@Test
public void updateAppDeploymentTest() throws AppCatalogException {
String applicationModule = addSampleApplicationModule("1");
String computeResource = addSampleComputeResource("1");
ApplicationDeploymentDescription deployment = prepareSampleDeployment("1", applicationModule, computeResource);
String deploymentId = applicationDeploymentRepository.addApplicationDeployment(deployment, gatewayId);
deployment.setDefaultQueueName("updated");
deployment.setAppDeploymentDescription("updated description");
CommandObject moduleLoadCmd = new CommandObject();
moduleLoadCmd.setCommand("moduleLoadCmd2");
moduleLoadCmd.setCommandOrder(2);
deployment.getModuleLoadCmds().add(moduleLoadCmd);
SetEnvPaths libPrependPath = new SetEnvPaths();
libPrependPath.setName("libPrependPath2");
libPrependPath.setValue("libPrependPathValue2");
libPrependPath.setEnvPathOrder(4);
deployment.getLibPrependPaths().add(libPrependPath);
deployment.setExecutablePath("executablePath2");
deployment.setParallelism(ApplicationParallelismType.MPI);
deployment.setDefaultCPUCount(12);
deployment.setDefaultNodeCount(15);
deployment.setDefaultWalltime(10);
deployment.setEditableByUser(false);
applicationDeploymentRepository.updateApplicationDeployment(deploymentId, deployment);
ApplicationDeploymentDescription updatedDeployment = applicationDeploymentRepository.getApplicationDeployement(deploymentId);
Assert.assertTrue(deepCompareDeployment(deployment, updatedDeployment));
}
@Test
public void listAllDeployments() throws AppCatalogException {
List<ApplicationDeploymentDescription> allDeployments = new ArrayList<>();
for (int i = 0 ; i < 5; i++) {
String applicationModule = addSampleApplicationModule(i + "");
String computeResource = addSampleComputeResource(i + "");
ApplicationDeploymentDescription deployment = prepareSampleDeployment(i + "", applicationModule, computeResource);
allDeployments.add(deployment);
String savedDeploymentId = applicationDeploymentRepository.addApplicationDeployment(deployment, gatewayId);
Assert.assertEquals(deployment.getAppDeploymentId(), savedDeploymentId);
}
List<ApplicationDeploymentDescription> appDeploymentList = applicationDeploymentRepository.getAllApplicationDeployements(gatewayId);
List<String> appDeploymentIds = applicationDeploymentRepository.getAllApplicationDeployementIds();
Assert.assertEquals(allDeployments.size(), appDeploymentList.size());
Assert.assertEquals(allDeployments.size(), appDeploymentIds.size());
for (int i = 0; i < allDeployments.size(); i++) {
Assert.assertTrue(deepCompareDeployment(allDeployments.get(i), appDeploymentList.get(i)));
Assert.assertEquals(allDeployments.get(i).getAppDeploymentId(), appDeploymentIds.get(i));
}
}
@Test
public void filterApplicationDeploymentsTest() throws AppCatalogException {
String applicationModule1 = addSampleApplicationModule("1");
String computeResource1 = addSampleComputeResource("1");
String applicationModule2 = addSampleApplicationModule("2");
String computeResource2 = addSampleComputeResource("2");
ApplicationDeploymentDescription deployment1 = prepareSampleDeployment( "1", applicationModule1, computeResource1);
ApplicationDeploymentDescription deployment2 = prepareSampleDeployment( "2", applicationModule1, computeResource2);
ApplicationDeploymentDescription deployment3 = prepareSampleDeployment( "3", applicationModule2, computeResource2);
applicationDeploymentRepository.saveApplicationDeployment(deployment1, gatewayId);
applicationDeploymentRepository.saveApplicationDeployment(deployment2, gatewayId);
applicationDeploymentRepository.saveApplicationDeployment(deployment3, gatewayId);
Map<String, String> filters = new HashMap<>();
filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, applicationModule1);
List<ApplicationDeploymentDescription> filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
Assert.assertEquals(2, filteredDeployments.size());
Assert.assertTrue(deepCompareDeployment(deployment1, filteredDeployments.get(0)));
Assert.assertTrue(deepCompareDeployment(deployment2, filteredDeployments.get(1)));
filters = new HashMap<>();
filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, applicationModule2);
filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
Assert.assertEquals(1, filteredDeployments.size());
Assert.assertTrue(deepCompareDeployment(deployment3, filteredDeployments.get(0)));
filters = new HashMap<>();
filters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, computeResource1);
filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
Assert.assertEquals(1, filteredDeployments.size());
Assert.assertTrue(deepCompareDeployment(deployment1, filteredDeployments.get(0)));
filters = new HashMap<>();
filters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, computeResource2);
filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
Assert.assertEquals(2, filteredDeployments.size());
Assert.assertTrue(deepCompareDeployment(deployment2, filteredDeployments.get(0)));
Assert.assertTrue(deepCompareDeployment(deployment3, filteredDeployments.get(1)));
filters = new HashMap<>();
filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, applicationModule1);
filters.put(DBConstants.ApplicationDeployment.COMPUTE_HOST_ID, computeResource2);
filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
Assert.assertEquals(1, filteredDeployments.size());
Assert.assertTrue(deepCompareDeployment(deployment2, filteredDeployments.get(0)));
filters = new HashMap<>();
filters.put(DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID, applicationModule1);
filters.put("INVALID FIELD", computeResource2);
try {
filteredDeployments = applicationDeploymentRepository.getApplicationDeployments(filters);
Assert.fail();
} catch (Exception e) {
// ignore
}
}
@Test
public void deleteApplicationDeploymentTest() throws AppCatalogException {
String applicationModule = addSampleApplicationModule("1");
String computeResource = addSampleComputeResource("1");
ApplicationDeploymentDescription deployment = prepareSampleDeployment( "1", applicationModule, computeResource);
Assert.assertNull(applicationDeploymentRepository.getApplicationDeployement(deployment.getAppDeploymentId()));
applicationDeploymentRepository.addApplicationDeployment(deployment, gatewayId);
Assert.assertNotNull(applicationDeploymentRepository.getApplicationDeployement(deployment.getAppDeploymentId()));
applicationDeploymentRepository.removeAppDeployment(deployment.getAppDeploymentId());
Assert.assertNull(applicationInterfaceRepository.getApplicationInterface(deployment.getAppDeploymentId()));
}
@Test
public void accessibleDeploymentTest() throws AppCatalogException {
String applicationModule1 = addSampleApplicationModule("1");
String computeResource1 = addSampleComputeResource("1");
String applicationModule2 = addSampleApplicationModule("2");
String computeResource2 = addSampleComputeResource("2");
ApplicationDeploymentDescription deployment1 = prepareSampleDeployment( "1", applicationModule1, computeResource1);
ApplicationDeploymentDescription deployment2 = prepareSampleDeployment( "2", applicationModule1, computeResource2);
ApplicationDeploymentDescription deployment3 = prepareSampleDeployment( "3", applicationModule2, computeResource2);
applicationDeploymentRepository.saveApplicationDeployment(deployment1, gatewayId);
applicationDeploymentRepository.saveApplicationDeployment(deployment2, gatewayId);
applicationDeploymentRepository.saveApplicationDeployment(deployment3, gatewayId);
List<String> accessibleAppIds = new ArrayList<>();
accessibleAppIds.add(deployment1.getAppDeploymentId());
accessibleAppIds.add(deployment2.getAppDeploymentId());
accessibleAppIds.add(deployment3.getAppDeploymentId());
List<String> accessibleCompHostIds = new ArrayList<>();
accessibleCompHostIds.add(computeResource1);
List<ApplicationDeploymentDescription> accessibleApplicationDeployments = applicationDeploymentRepository
.getAccessibleApplicationDeployments(gatewayId, accessibleAppIds, accessibleCompHostIds);
assertTrue(accessibleApplicationDeployments.size() == 1);
assertTrue(deepCompareDeployment(deployment1, accessibleApplicationDeployments.get(0)));
accessibleCompHostIds = new ArrayList<>();
accessibleCompHostIds.add(computeResource2);
accessibleApplicationDeployments = applicationDeploymentRepository
.getAccessibleApplicationDeployments(gatewayId, accessibleAppIds, accessibleCompHostIds);
assertTrue(accessibleApplicationDeployments.size() == 2);
assertTrue(deepCompareDeployment(deployment2, accessibleApplicationDeployments.get(0)));
assertTrue(deepCompareDeployment(deployment3, accessibleApplicationDeployments.get(1)));
}
} | 818 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/JobSubmissionInterfaceRepositoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.computeresource.BatchQueue;
import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
import org.apache.airavata.registry.core.entities.appcatalog.JobSubmissionInterfacePK;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests for {@link JobSubmissionInterfaceRepository}: adds a job submission
 * interface to a freshly created compute resource, reads it back by its
 * composite key, and deletes it. The compute resource is created/removed
 * around each test by the {@code @Before}/{@code @After} hooks.
 */
public class JobSubmissionInterfaceRepositoryTest extends TestBase {

    // Bug fix: logger previously used ComputeResourceRepositoryTest.class
    // (copy/paste error), which misattributed this class's log output.
    private static final Logger logger = LoggerFactory.getLogger(JobSubmissionInterfaceRepositoryTest.class);

    private JobSubmissionInterfaceRepository jobSubmissionInterfaceRepository;
    private ComputeResourceRepository computeResourceRepository;
    private String computeResourceId;

    public JobSubmissionInterfaceRepositoryTest() {
        super(Database.APP_CATALOG);
        jobSubmissionInterfaceRepository = new JobSubmissionInterfaceRepository();
        computeResourceRepository = new ComputeResourceRepository();
    }

    /** Creates the compute resource ("localhost" with two queues) the test attaches interfaces to. */
    @Before
    public void createTestComputeResource() throws AppCatalogException {
        ComputeResourceDescription description = new ComputeResourceDescription();
        description.setHostName("localhost");
        description.addToBatchQueues(new BatchQueue("queue1"));
        description.addToBatchQueues(new BatchQueue("queue2"));
        computeResourceId = computeResourceRepository.addComputeResource(description);
    }

    /** Removes the compute resource created in setup. */
    @After
    public void removeTestComputeResource() throws AppCatalogException {
        computeResourceRepository.removeComputeResource(computeResourceId);
    }

    @Test
    public void testAddJobSubmissionInterface() throws AppCatalogException {
        JobSubmissionInterface jobSubmissionInterface = new JobSubmissionInterface();
        jobSubmissionInterface.setJobSubmissionInterfaceId("test");
        jobSubmissionInterface.setPriorityOrder(1);
        jobSubmissionInterface.setJobSubmissionProtocol(JobSubmissionProtocol.SSH);
        String jobSubmissionInterfaceId = jobSubmissionInterfaceRepository.addJobSubmission(computeResourceId, jobSubmissionInterface);

        // The entity key is composite: (computeResourceId, jobSubmissionInterfaceId).
        JobSubmissionInterfacePK pk = new JobSubmissionInterfacePK();
        pk.setComputeResourceId(computeResourceId);
        pk.setJobSubmissionInterfaceId(jobSubmissionInterfaceId);
        JobSubmissionInterface retrievedJobSubmissionInterface = jobSubmissionInterfaceRepository.get(pk);
        Assert.assertEquals("test", retrievedJobSubmissionInterface.getJobSubmissionInterfaceId());
        Assert.assertEquals(1, retrievedJobSubmissionInterface.getPriorityOrder());
        Assert.assertEquals(JobSubmissionProtocol.SSH, retrievedJobSubmissionInterface.getJobSubmissionProtocol());

        jobSubmissionInterfaceRepository.delete(pk);
    }
}
| 819 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayGroupsRepositoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.gatewaygroups.GatewayGroups;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests for {@link GatewayGroupsRepository}: create/retrieve and update of the
 * per-gateway group-id mapping (admins, read-only admins, default users).
 */
public class GatewayGroupsRepositoryTest extends TestBase {

    private static final String GATEWAY_ID = "gateway-id";
    private static final String ADMIN_GROUPS_ID = "admin-groups-id";
    private static final String READ_ONLY_ADMINS_GROUP_ID = "read-only-admins-group-id";
    private static final String DEFAULT_GATEWAY_USERS_GROUP_ID = "default-gateway-users-group-id";

    private GatewayGroupsRepository gatewayGroupsRepository;

    // Bug fix: logger previously used GatewayProfileRepositoryTest.class
    // (copy/paste error), which misattributed this class's log output.
    private static final Logger logger = LoggerFactory.getLogger(GatewayGroupsRepositoryTest.class);

    public GatewayGroupsRepositoryTest() {
        super(Database.APP_CATALOG);
        gatewayGroupsRepository = new GatewayGroupsRepository();
    }

    @Test
    public void testCreateAndRetrieveGatewayGroups() throws Exception {
        GatewayGroups gatewayGroups = new GatewayGroups();
        gatewayGroups.setGatewayId(GATEWAY_ID);
        gatewayGroups.setAdminsGroupId(ADMIN_GROUPS_ID);
        gatewayGroups.setReadOnlyAdminsGroupId(READ_ONLY_ADMINS_GROUP_ID);
        gatewayGroups.setDefaultGatewayUsersGroupId(DEFAULT_GATEWAY_USERS_GROUP_ID);
        gatewayGroupsRepository.create(gatewayGroups);

        GatewayGroups retrievedGatewayGroups = gatewayGroupsRepository.get(GATEWAY_ID);
        Assert.assertEquals(ADMIN_GROUPS_ID, retrievedGatewayGroups.getAdminsGroupId());
        Assert.assertEquals(READ_ONLY_ADMINS_GROUP_ID, retrievedGatewayGroups.getReadOnlyAdminsGroupId());
        Assert.assertEquals(DEFAULT_GATEWAY_USERS_GROUP_ID, retrievedGatewayGroups.getDefaultGatewayUsersGroupId());
        Assert.assertEquals(gatewayGroups, retrievedGatewayGroups);

        gatewayGroupsRepository.delete(GATEWAY_ID);
    }

    @Test
    public void testUpdateGatewayGroups() throws Exception {
        GatewayGroups gatewayGroups = new GatewayGroups();
        gatewayGroups.setGatewayId(GATEWAY_ID);
        gatewayGroups.setAdminsGroupId(ADMIN_GROUPS_ID);
        gatewayGroups.setReadOnlyAdminsGroupId(READ_ONLY_ADMINS_GROUP_ID);
        gatewayGroups.setDefaultGatewayUsersGroupId(DEFAULT_GATEWAY_USERS_GROUP_ID);
        gatewayGroupsRepository.create(gatewayGroups);

        // Change only the default-users group; the other two ids must survive the update.
        final String defaultGatewayUsersGroupId = "some-other-group-id";
        gatewayGroups.setDefaultGatewayUsersGroupId(defaultGatewayUsersGroupId);
        gatewayGroupsRepository.update(gatewayGroups);

        GatewayGroups retrievedGatewayGroups = gatewayGroupsRepository.get(GATEWAY_ID);
        Assert.assertEquals(ADMIN_GROUPS_ID, retrievedGatewayGroups.getAdminsGroupId());
        Assert.assertEquals(READ_ONLY_ADMINS_GROUP_ID, retrievedGatewayGroups.getReadOnlyAdminsGroupId());
        Assert.assertEquals(defaultGatewayUsersGroupId, retrievedGatewayGroups.getDefaultGatewayUsersGroupId());
        Assert.assertEquals(gatewayGroups, retrievedGatewayGroups);

        gatewayGroupsRepository.delete(GATEWAY_ID);
    }
}
| 820 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/UserResourceProfileRepositoryTest.java | package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
import org.apache.airavata.model.appcatalog.userresourceprofile.UserStoragePreference;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.*;
/**
 * End-to-end CRUD test for {@link UserResourceProfileRepository}, covering the
 * user resource profile plus its nested compute-resource and storage preferences.
 */
public class UserResourceProfileRepositoryTest extends TestBase {

    private static final Logger logger = LoggerFactory.getLogger(UserResourceProfileRepositoryTest.class);

    private UserResourceProfileRepository userResourceProfileRepository;
    private String userId = "testUser";
    private String gatewayId = "testGateway";

    public UserResourceProfileRepositoryTest() {
        super(Database.APP_CATALOG);
        userResourceProfileRepository = new UserResourceProfileRepository();
    }

    /**
     * Renamed from {@code UserResourceProfileRepositoryTest()} — a test method
     * sharing the class name reads like a constructor. JUnit discovers tests via
     * {@code @Test}, so the rename does not affect discovery.
     */
    @Test
    public void testUserResourceProfileCrudOperations() throws AppCatalogException {
        UserComputeResourcePreference userComputeResourcePreference = new UserComputeResourcePreference();
        userComputeResourcePreference.setComputeResourceId("computeResource1");
        userComputeResourcePreference.setLoginUserName(userId);
        userComputeResourcePreference.setPreferredBatchQueue("queue1");
        userComputeResourcePreference.setScratchLocation("location1");

        UserStoragePreference userStoragePreference = new UserStoragePreference();
        userStoragePreference.setStorageResourceId("storageResource1");
        userStoragePreference.setLoginUserName(userId);
        userStoragePreference.setFileSystemRootLocation("location2");
        userStoragePreference.setResourceSpecificCredentialStoreToken("token1");

        UserResourceProfile userResourceProfile = new UserResourceProfile();
        userResourceProfile.setUserId(userId);
        userResourceProfile.setGatewayID(gatewayId);
        userResourceProfile.setCredentialStoreToken("token");
        userResourceProfile.setUserComputeResourcePreferences(Arrays.asList(userComputeResourcePreference));
        userResourceProfile.setUserStoragePreferences(Arrays.asList(userStoragePreference));
        userResourceProfile.setIdentityServerTenant("tenant1");
        userResourceProfile.setIdentityServerPwdCredToken("password");

        // Create only if absent (the profile key is (userId, gatewayId)).
        if (!userResourceProfileRepository.isUserResourceProfileExists(userId, gatewayId))
            userResourceProfileRepository.addUserResourceProfile(userResourceProfile);
        assertEquals(userId, userResourceProfile.getUserId());

        // Update a scalar field and verify it round-trips.
        userResourceProfile.setIdentityServerTenant("tenant2");
        userResourceProfileRepository.updateUserResourceProfile(userId, gatewayId, userResourceProfile);
        UserResourceProfile retrievedUserResourceProfile = userResourceProfileRepository.getUserResourceProfile(userId, gatewayId);
        assertEquals(1, retrievedUserResourceProfile.getUserStoragePreferences().size());
        assertEquals(userResourceProfile.getIdentityServerTenant(), retrievedUserResourceProfile.getIdentityServerTenant());

        // Nested preferences are retrievable individually by their resource ids.
        UserComputeResourcePreference retrievedUserComputeResourcePreference = userResourceProfileRepository.getUserComputeResourcePreference(
                userId, gatewayId, userComputeResourcePreference.getComputeResourceId());
        assertEquals(userComputeResourcePreference.getLoginUserName(), retrievedUserComputeResourcePreference.getLoginUserName());
        UserStoragePreference retrievedUserStoragePreference = userResourceProfileRepository.getUserStoragePreference(
                userId, gatewayId, userStoragePreference.getStorageResourceId());
        assertEquals(userStoragePreference.getFileSystemRootLocation(), retrievedUserStoragePreference.getFileSystemRootLocation());

        // Listing queries all see exactly the one profile/preference created above.
        assertEquals(1, userResourceProfileRepository.getAllUserResourceProfiles().size());
        assertEquals(1, userResourceProfileRepository.getAllUserComputeResourcePreferences(userId, gatewayId).size());
        assertEquals(1, userResourceProfileRepository.getAllUserStoragePreferences(userId, gatewayId).size());
        assertEquals(1, userResourceProfileRepository.getGatewayProfileIds(gatewayId).size());
        assertEquals(userId, userResourceProfileRepository.getUserNamefromID(userId, gatewayId));

        // Tear down: remove the nested preferences, then the profile itself.
        userResourceProfileRepository.removeUserComputeResourcePreferenceFromGateway(userId, gatewayId, userComputeResourcePreference.getComputeResourceId());
        userResourceProfileRepository.removeUserDataStoragePreferenceFromGateway(userId, gatewayId, userStoragePreference.getStorageResourceId());
        userResourceProfileRepository.removeUserResourceProfile(userId, gatewayId);
    }
}
| 821 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayProfileRepositoryTest.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.model.appcatalog.computeresource.ComputeResourceDescription;
import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol;
import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
import org.apache.airavata.model.data.movement.DataMovementProtocol;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class GatewayProfileRepositoryTest extends TestBase {
    private static final Logger logger = LoggerFactory.getLogger(GatewayProfileRepositoryTest.class);

    private GwyResourceProfileRepository gwyResourceProfileRepository;

    public GatewayProfileRepositoryTest() {
        super(Database.APP_CATALOG);
        gwyResourceProfileRepository = new GwyResourceProfileRepository();
    }

    /**
     * Exercises gateway resource profile CRUD: verifies the default profile
     * exists, adds two profiles (one with two compute resource preferences),
     * reads everything back, and cleans up so later tests see only the
     * default profile again.
     */
    @Test
    public void gatewayProfileRepositorytest() throws AppCatalogException, ApplicationSettingsException {
        // Verify that the default Gateway Resource Profile exists already
        List<GatewayResourceProfile> defaultGatewayResourceProfileList = this.gwyResourceProfileRepository
                .getAllGatewayProfiles();
        assertEquals(1, defaultGatewayResourceProfileList.size());
        assertEquals(ServerSettings.getDefaultUserGateway(), defaultGatewayResourceProfileList.get(0).getGatewayID());

        // Register two compute resources the preferences below refer to.
        ComputeResourceRepository computeResourceRepository = new ComputeResourceRepository();
        ComputeResourceDescription cm1 = new ComputeResourceDescription();
        cm1.setHostName("localhost");
        cm1.setResourceDescription("test compute host");
        String hostId1 = computeResourceRepository.addComputeResource(cm1);

        ComputeResourceDescription cm2 = new ComputeResourceDescription();
        cm2.setHostName("localhost");
        cm2.setResourceDescription("test compute host");
        String hostId2 = computeResourceRepository.addComputeResource(cm2);

        // Preference overridden by Airavata, with SSH account provisioner config.
        ComputeResourcePreference preference1 = new ComputeResourcePreference();
        preference1.setComputeResourceId(hostId1);
        preference1.setOverridebyAiravata(true);
        preference1.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.SSH);
        preference1.setPreferredDataMovementProtocol(DataMovementProtocol.SCP);
        preference1.setPreferredBatchQueue("queue1");
        preference1.setScratchLocation("/tmp");
        preference1.setAllocationProjectNumber("project1");
        Map<String, String> sshConfig = new HashMap<>();
        sshConfig.put("ANYTEST", "check");
        preference1.setSshAccountProvisionerConfig(sshConfig);

        // Preference NOT overridden by Airavata.
        ComputeResourcePreference preference2 = new ComputeResourcePreference();
        preference2.setComputeResourceId(hostId2);
        preference2.setOverridebyAiravata(false);
        preference2.setPreferredJobSubmissionProtocol(JobSubmissionProtocol.LOCAL);
        preference2.setPreferredDataMovementProtocol(DataMovementProtocol.GridFTP);
        preference2.setPreferredBatchQueue("queue2");
        preference2.setScratchLocation("/tmp");
        preference2.setAllocationProjectNumber("project2");

        List<ComputeResourcePreference> list = new ArrayList<>();
        list.add(preference1);
        list.add(preference2);

        GatewayResourceProfile gf = new GatewayResourceProfile();
        gf.setGatewayID("testGateway");
        gf.setCredentialStoreToken("testCredential");
        gf.setIdentityServerPwdCredToken("pwdCredential");
        gf.setIdentityServerTenant("testTenant");
        gf.setComputeResourcePreferences(list);

        GatewayResourceProfile gf1 = new GatewayResourceProfile();
        gf1.setGatewayID("testGateway1");
        gf1.setCredentialStoreToken("testCredential");
        gf1.setIdentityServerPwdCredToken("pwdCredential");
        gf1.setIdentityServerTenant("testTenant");

        String gwId = gwyResourceProfileRepository.addGatewayResourceProfile(gf);
        // Assert existence instead of guarding with "if": previously a missing
        // profile silently skipped all the retrieval assertions.
        assertTrue("Added gateway profile should exist", gwyResourceProfileRepository.isExists(gwId));
        GatewayResourceProfile retrievedProfile = gwyResourceProfileRepository.getGatewayProfile(gwId);
        assertEquals("testGateway", retrievedProfile.getGatewayID());
        assertEquals("testCredential", retrievedProfile.getCredentialStoreToken());
        assertEquals("pwdCredential", retrievedProfile.getIdentityServerPwdCredToken());
        assertEquals("testTenant", retrievedProfile.getIdentityServerTenant());

        gwyResourceProfileRepository.addGatewayResourceProfile(gf1);
        List<GatewayResourceProfile> getGatewayResourceList = gwyResourceProfileRepository.getAllGatewayProfiles();
        assertEquals("should be 3 gateway profiles (1 default and 2 just added)", 3, getGatewayResourceList.size());

        List<ComputeResourcePreference> preferences = gwyResourceProfileRepository
                .getAllComputeResourcePreferences(gwId);
        // NOTE: assert size directly; the original null check came AFTER a
        // size() dereference and therefore could never take effect.
        assertEquals(2, preferences.size());
        ComputeResourcePreference pref1 = preferences.stream()
                .filter(p -> p.getComputeResourceId().equals(hostId1)).findFirst().get();
        assertTrue(pref1.isOverridebyAiravata());
        ComputeResourcePreference pref2 = preferences.stream()
                .filter(p -> p.getComputeResourceId().equals(hostId2)).findFirst().get();
        assertFalse(pref2.isOverridebyAiravata());

        // Clean up so subsequent tests start from the default state.
        computeResourceRepository.removeComputeResource(hostId1);
        computeResourceRepository.removeComputeResource(hostId2);
        gwyResourceProfileRepository.delete("testGateway");
        gwyResourceProfileRepository.delete("testGateway1");
    }
}
| 822 |
0 | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/test/java/org/apache/airavata/registry/core/repositories/workflowcatalog/WorkflowRepositoryTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.workflowcatalog;
import org.apache.airavata.model.workflow.*;
import org.apache.airavata.registry.core.repositories.common.TestBase;
import org.apache.airavata.registry.cpi.WorkflowCatalogException;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
// TODO: fix derby initialization script so that this test can be re-enabled
@Ignore
public class WorkflowRepositoryTest extends TestBase {

    private WorkflowRepository workflowRepository;

    // Workflow related constants (static final: shared, immutable test data)
    private static final String EXPERIMENT_ID = "sample_exp_id";
    private static final String SAMPLE_DESCRIPTION = "Sample description about the application";

    // Application related constants
    private static final String APPLICATION_PREFIX = "app_";
    private static final String SAMPLE_APPLICATION_INTERFACE_ID = "app_interface_1";
    private static final String SAMPLE_COMPUTE_RESOURCE_ID = "comp_resource_1";
    private static final String SAMPLE_QUEUE_NAME = "queue_1";
    private static final int SAMPLE_NODE_COUNT = 4;
    private static final int SAMPLE_CORE_COUNT = 4;
    private static final int SAMPLE_WALL_TIME_LIMIT = 4;
    private static final int SAMPLE_PHYSICAL_MEMORY = 1000;
    private static final String SAMPLE_APP_INPUT_NAME = "app_input";
    private static final String SAMPLE_APP_OUTPUT_NAME = "app_output";

    // Handler related constants
    private static final String HANDLER_PREFIX = "handler_";
    private static final String SAMPLE_HANDLER_INPUT_NAME = "handler_input";
    private static final String SAMPLE_HANDLER_OUTPUT_NAME = "handler_output";

    public WorkflowRepositoryTest() {
        super(Database.WORKFLOW_CATALOG);
    }

    @Before
    @Override
    public void setUp() throws Exception {
        super.setUp();
        workflowRepository = new WorkflowRepository();
    }

    /**
     * Registers a simple two-application workflow (handler -> app1 -> app2 ->
     * handler) against a sample experiment, retrieves it by experiment id, and
     * asserts the persisted structure matches what was submitted.
     */
    @Test
    public void SubmitWorkflowTest() throws WorkflowCatalogException {
        workflowRepository.registerWorkflow(getSimpleWorkflow(), EXPERIMENT_ID);
        AiravataWorkflow workflow = workflowRepository.getWorkflow(workflowRepository.getWorkflowId(EXPERIMENT_ID));

        // Assert workflow-level structure
        assertEquals(SAMPLE_DESCRIPTION, workflow.getDescription());
        assertEquals(2, workflow.getApplicationsSize());
        assertEquals(2, workflow.getHandlersSize());
        assertEquals(3, workflow.getConnectionsSize());

        // Both applications were created from identical sample settings.
        for (WorkflowApplication app : workflow.getApplications()) {
            assertEquals(SAMPLE_APPLICATION_INTERFACE_ID, app.getApplicationInterfaceId());
            assertEquals(SAMPLE_COMPUTE_RESOURCE_ID, app.getComputeResourceId());
            assertEquals(SAMPLE_QUEUE_NAME, app.getQueueName());
            assertEquals(SAMPLE_NODE_COUNT, app.getNodeCount());
            assertEquals(SAMPLE_CORE_COUNT, app.getCoreCount());
            assertEquals(SAMPLE_WALL_TIME_LIMIT, app.getWallTimeLimit());
            assertEquals(SAMPLE_PHYSICAL_MEMORY, app.getPhysicalMemory());
        }
    }

    /**
     * Builds the sample workflow: a FLOW_STARTER handler feeding two chained
     * applications, terminated by a FLOW_TERMINATOR handler.
     */
    private AiravataWorkflow getSimpleWorkflow() {
        AiravataWorkflow workflow = new AiravataWorkflow();

        // Basic workflow parameters
        workflow.setDescription(SAMPLE_DESCRIPTION);

        // Workflow applications (two identical apps with different ids)
        WorkflowApplication application1 = new WorkflowApplication();
        application1.setId(APPLICATION_PREFIX + 1);
        application1.setApplicationInterfaceId(SAMPLE_APPLICATION_INTERFACE_ID);
        application1.setComputeResourceId(SAMPLE_COMPUTE_RESOURCE_ID);
        application1.setQueueName(SAMPLE_QUEUE_NAME);
        application1.setNodeCount(SAMPLE_NODE_COUNT);
        application1.setCoreCount(SAMPLE_CORE_COUNT);
        application1.setWallTimeLimit(SAMPLE_WALL_TIME_LIMIT);
        application1.setPhysicalMemory(SAMPLE_PHYSICAL_MEMORY);

        WorkflowApplication application2 = new WorkflowApplication();
        application2.setId(APPLICATION_PREFIX + 2);
        application2.setApplicationInterfaceId(SAMPLE_APPLICATION_INTERFACE_ID);
        application2.setComputeResourceId(SAMPLE_COMPUTE_RESOURCE_ID);
        application2.setQueueName(SAMPLE_QUEUE_NAME);
        application2.setNodeCount(SAMPLE_NODE_COUNT);
        application2.setCoreCount(SAMPLE_CORE_COUNT);
        application2.setWallTimeLimit(SAMPLE_WALL_TIME_LIMIT);
        application2.setPhysicalMemory(SAMPLE_PHYSICAL_MEMORY);

        workflow.addToApplications(application1);
        workflow.addToApplications(application2);

        // Workflow handlers: one starter, one terminator
        WorkflowHandler handler1 = new WorkflowHandler();
        handler1.setId(HANDLER_PREFIX + 1);
        handler1.setType(HandlerType.FLOW_STARTER);

        WorkflowHandler handler2 = new WorkflowHandler();
        handler2.setId(HANDLER_PREFIX + 2);
        handler2.setType(HandlerType.FLOW_TERMINATOR);

        workflow.addToHandlers(handler1);
        workflow.addToHandlers(handler2);

        // Connections: handler1 -> app1 -> app2 -> handler2
        WorkflowConnection connection1 = new WorkflowConnection();
        connection1.setFromType(ComponentType.HANDLER);
        connection1.setFromId(HANDLER_PREFIX + 1);
        connection1.setFromOutputName(SAMPLE_HANDLER_OUTPUT_NAME);
        connection1.setToType(ComponentType.APPLICATION);
        connection1.setToId(APPLICATION_PREFIX + 1);
        connection1.setToInputName(SAMPLE_APP_INPUT_NAME);

        WorkflowConnection connection2 = new WorkflowConnection();
        connection2.setFromType(ComponentType.APPLICATION);
        connection2.setFromId(APPLICATION_PREFIX + 1);
        connection2.setFromOutputName(SAMPLE_APP_OUTPUT_NAME);
        connection2.setToType(ComponentType.APPLICATION);
        connection2.setToId(APPLICATION_PREFIX + 2);
        connection2.setToInputName(SAMPLE_APP_INPUT_NAME);

        WorkflowConnection connection3 = new WorkflowConnection();
        connection3.setFromType(ComponentType.APPLICATION);
        connection3.setFromId(APPLICATION_PREFIX + 2);
        connection3.setFromOutputName(SAMPLE_APP_OUTPUT_NAME);
        connection3.setToType(ComponentType.HANDLER);
        connection3.setToId(HANDLER_PREFIX + 2);
        connection3.setToInputName(SAMPLE_HANDLER_INPUT_NAME);

        workflow.addToConnections(connection1);
        workflow.addToConnections(connection2);
        workflow.addToConnections(connection3);

        return workflow;
    }
}
| 823 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/RegistryException.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Checked exception thrown by registry components when a registry operation
 * fails. Provides the standard message/cause constructors so callers can
 * preserve the underlying failure when wrapping lower-level exceptions.
 */
public class RegistryException extends Exception {
    private static final long serialVersionUID = 1L;

    /** Creates an exception with no detail message. */
    public RegistryException() {
        super();
    }

    /**
     * Creates an exception with the given detail message.
     *
     * @param message description of the failure
     */
    public RegistryException(String message) {
        super(message);
    }

    /**
     * Creates an exception with the given detail message and cause.
     *
     * @param message description of the failure
     * @param cause underlying exception being wrapped (kept for the stack trace)
     */
    public RegistryException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates an exception wrapping the given cause.
     *
     * @param cause underlying exception being wrapped
     */
    public RegistryException(Throwable cause) {
        super(cause);
    }
}
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/AppCatalogUtils.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.utils;
import java.util.UUID;
/**
 * Utility methods for generating identifiers for app catalog entities.
 */
public class AppCatalogUtils {

    private AppCatalogUtils() {
        // Utility class; not meant to be instantiated.
    }

    /**
     * Generates a unique identifier by stripping all whitespace from the given
     * name and appending a random UUID.
     *
     * @param name entity name used as the identifier prefix; must be non-null
     * @return identifier of the form {@code <nameWithoutWhitespace>_<uuid>}
     */
    public static String getID(String name) {
        String prefix = name.replaceAll("\\s", "");
        return prefix + "_" + UUID.randomUUID();
    }
}
}
| 825 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/WorkflowCatalogUtils.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.utils;
import java.util.UUID;
/**
 * Utility methods for generating identifiers for workflow catalog entities.
 */
public class WorkflowCatalogUtils {

    private WorkflowCatalogUtils() {
        // Utility class; not meant to be instantiated.
    }

    /**
     * Generates a unique identifier by stripping all whitespace from the given
     * name and appending a random UUID.
     *
     * @param name entity name used as the identifier prefix; must be non-null
     * @return identifier of the form {@code <nameWithoutWhitespace>_<uuid>}
     */
    public static String getID(String name) {
        String prefix = name.replaceAll("\\s", "");
        return prefix + "_" + UUID.randomUUID();
    }
}
}
| 826 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/ReplicaCatalogJDBCConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.ServerSettings;
/**
 * JDBC connection settings for the replica catalog, resolved from
 * airavata-server.properties via {@link ServerSettings}.
 */
public class ReplicaCatalogJDBCConfig implements JDBCConfig {

    private static final String DRIVER_PROPERTY = "replicacatalog.jdbc.driver";
    private static final String URL_PROPERTY = "replicacatalog.jdbc.url";
    private static final String USER_PROPERTY = "replicacatalog.jdbc.user";
    private static final String PASSWORD_PROPERTY = "replicacatalog.jdbc.password";
    private static final String VALIDATION_QUERY_PROPERTY = "replicacatalog.validationQuery";

    @Override
    public String getURL() {
        return lookup(URL_PROPERTY);
    }

    @Override
    public String getDriver() {
        return lookup(DRIVER_PROPERTY);
    }

    @Override
    public String getUser() {
        return lookup(USER_PROPERTY);
    }

    @Override
    public String getPassword() {
        return lookup(PASSWORD_PROPERTY);
    }

    @Override
    public String getValidationQuery() {
        return lookup(VALIDATION_QUERY_PROPERTY);
    }

    /**
     * Resolves a property from the server settings, failing fast when the
     * configuration file cannot be read.
     */
    private String lookup(String key) {
        try {
            return ServerSettings.getSetting(key);
        } catch (ApplicationSettingsException e) {
            throw new RuntimeException("Unable to read airavata-server.properties...", e);
        }
    }
}
| 827 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/QueryConstants.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.utils;
import org.apache.airavata.model.status.JobStatus;
import org.apache.airavata.model.status.QueueStatusModel;
import org.apache.airavata.model.user.UserProfile;
import org.apache.airavata.registry.core.entities.airavataworkflowcatalog.AiravataWorkflowEntity;
import org.apache.airavata.registry.core.entities.appcatalog.*;
import org.apache.airavata.registry.core.entities.expcatalog.*;
import org.apache.airavata.registry.core.entities.replicacatalog.DataProductEntity;
import javax.print.attribute.standard.JobState;
public interface QueryConstants {
String FIND_USER_PROFILE_BY_USER_ID = "SELECT u FROM UserProfileEntity u " +
"where u.userId LIKE :" + UserProfile._Fields.USER_ID.getFieldName() + " " +
"AND u.gatewayId LIKE :" + UserProfile._Fields.GATEWAY_ID.getFieldName() + "";
String FIND_ALL_USER_PROFILES_BY_GATEWAY_ID = "SELECT u FROM UserProfileEntity u " +
"where u.gatewayId LIKE :" + UserProfile._Fields.GATEWAY_ID.getFieldName() + "";
// Application Deployment Queries
String FIND_APPLICATION_DEPLOYMENTS_FOR_GATEWAY_ID = "SELECT AD FROM " + ApplicationDeploymentEntity.class.getSimpleName() + " AD " +
"WHERE AD.gatewayId LIKE :" + DBConstants.ApplicationDeployment.GATEWAY_ID;
String FIND_APPLICATION_DEPLOYMENTS_FOR_APPLICATION_MODULE_ID = "SELECT AD FROM " + ApplicationDeploymentEntity.class.getSimpleName() + " AD " +
"WHERE AD.appModuleId LIKE :" + DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID;
String FIND_APPLICATION_DEPLOYMENTS_FOR_COMPUTE_HOST_ID = "SELECT AD FROM " + ApplicationDeploymentEntity.class.getSimpleName() + " AD " +
"WHERE AD.computeHostId LIKE :" + DBConstants.ApplicationDeployment.COMPUTE_HOST_ID;
String GET_ALL_APPLICATION_DEPLOYMENTS = "SELECT AD FROM " + ApplicationDeploymentEntity.class.getSimpleName() + " AD";
String FIND_ACCESSIBLE_APPLICATION_DEPLOYMENTS = "SELECT AD FROM " + ApplicationDeploymentEntity.class.getSimpleName() + " AD " +
"WHERE AD.gatewayId LIKE :" + DBConstants.ApplicationDeployment.GATEWAY_ID + " AND AD.appDeploymentId IN :" +
DBConstants.ApplicationDeployment.ACCESSIBLE_APPLICATION_DEPLOYMENT_IDS + " AND AD.computeHostId IN :" +
DBConstants.ApplicationDeployment.ACCESSIBLE_COMPUTE_HOST_IDS;
String FIND_ACCESSIBLE_APPLICATION_DEPLOYMENTS_FOR_APP_MODULE = "SELECT AD FROM " + ApplicationDeploymentEntity.class.getSimpleName() + " AD " +
"WHERE AD.gatewayId LIKE :" + DBConstants.ApplicationDeployment.GATEWAY_ID + " AND AD.appDeploymentId IN :" +
DBConstants.ApplicationDeployment.ACCESSIBLE_APPLICATION_DEPLOYMENT_IDS + " AND AD.computeHostId IN :" +
DBConstants.ApplicationDeployment.ACCESSIBLE_COMPUTE_HOST_IDS + " AND AD.appModuleId = :" +
DBConstants.ApplicationDeployment.APPLICATION_MODULE_ID;
// Application Module Queries
String FIND_APPLICATION_MODULES_FOR_GATEWAY_ID = "SELECT AM FROM " + ApplicationModuleEntity.class.getSimpleName() + " AM " +
"WHERE AM.gatewayId LIKE :" + DBConstants.ApplicationModule.GATEWAY_ID;
String FIND_APPLICATION_MODULES_FOR_APPLICATION_MODULE_NAME = "SELECT AM FROM " + ApplicationModuleEntity.class.getSimpleName() + " AM " +
"WHERE AM.appModuleName LIKE :" + DBConstants.ApplicationModule.APPLICATION_MODULE_NAME;
String FIND_ACCESSIBLE_APPLICATION_MODULES = "SELECT AM FROM " + ApplicationModuleEntity.class.getSimpleName() + " AM " +
"WHERE AM.gatewayId LIKE :" + DBConstants.ApplicationModule.GATEWAY_ID + " AND " +
"EXISTS (SELECT 1 FROM " + ApplicationDeploymentEntity.class.getSimpleName() + " AD " +
"WHERE AM.appModuleId = AD.appModuleId AND AD.appDeploymentId IN :" + DBConstants.ApplicationDeployment.ACCESSIBLE_APPLICATION_DEPLOYMENT_IDS +
" AND AD.computeHostId IN :" + DBConstants.ApplicationDeployment.ACCESSIBLE_COMPUTE_HOST_IDS + ")";
// Application Interface Queries
String FIND_APPLICATION_INTERFACES_FOR_GATEWAY_ID = "SELECT AI FROM " + ApplicationInterfaceEntity.class.getSimpleName() + " AI " +
"WHERE AI.gatewayId LIKE :" + DBConstants.ApplicationInterface.GATEWAY_ID;
String FIND_APPLICATION_INTERFACES_FOR_APPLICATION_NAME = "SELECT AI FROM " + ApplicationInterfaceEntity.class.getSimpleName() + " AI " +
"WHERE AI.applicationName LIKE :" + DBConstants.ApplicationInterface.APPLICATION_NAME;
String GET_ALL_APPLICATION_INTERFACES = "SELECT AI FROM " + ApplicationInterfaceEntity.class.getSimpleName() + " AI";
// Application Inputs Queries
String FIND_APPLICATION_INPUTS = "SELECT AI FROM " + ApplicationInputEntity.class.getSimpleName() + " AI " +
"WHERE AI.interfaceId LIKE :" + DBConstants.ApplicationInput.APPLICATION_INTERFACE_ID;
// Application Outputs Queries
String FIND_APPLICATION_OUTPUTS = "SELECT AI FROM " + ApplicationOutputEntity.class.getSimpleName() + " AI " +
"WHERE AI.interfaceId LIKE :" + DBConstants.ApplicationOutput.APPLICATION_INTERFACE_ID;
String FIND_ALL_GATEWAY_PROFILES = "SELECT G FROM " + GatewayProfileEntity.class.getSimpleName() + " G";
String FIND_ALL_COMPUTE_RESOURCE_PREFERENCES = "SELECT DISTINCT CR FROM " + ComputeResourcePreferenceEntity.class.getSimpleName() + " CR " +
"WHERE CR.gatewayId LIKE :" + DBConstants.ComputeResourcePreference.GATEWAY_ID;
String FIND_ALL_STORAGE_RESOURCE_PREFERENCES = "SELECT DISTINCT S FROM " + StoragePreferenceEntity.class.getSimpleName() + " S " +
"WHERE S.gatewayId LIKE :" + DBConstants.StorageResourcePreference.GATEWAY_ID;
String FIND_COMPUTE_RESOURCE = "SELECT DISTINCT CR FROM " + ComputeResourceEntity.class.getSimpleName() + " CR " +
"WHERE CR.hostName LIKE :" + DBConstants.ComputeResource.HOST_NAME;
String FIND_ALL_COMPUTE_RESOURCES = "SELECT CR FROM " + ComputeResourceEntity.class.getSimpleName() + " CR";
String GET_FILE_SYSTEM = "SELECT DISTINCT FS FROM " + ComputeResourceFileSystemEntity.class.getSimpleName() + " FS " +
"WHERE FS.computeResourceId LIKE :" + DBConstants.ComputeResource.COMPUTE_RESOURCE_ID;
String GET_JOB_MANAGER_COMMAND = "SELECT DISTINCT JM FROM " + JobManagerCommandEntity.class.getSimpleName() + " JM " +
"WHERE JM.resourceJobManagerId LIKE :" + DBConstants.ResourceJobManager.RESOURCE_JOB_MANAGER_ID;
String GET_PARALLELISM_PREFIX = "SELECT DISTINCT PF FROM " + ParallelismCommandEntity.class.getSimpleName() + " PF " +
"WHERE PF.resourceJobManagerId LIKE :" + DBConstants.ResourceJobManager.RESOURCE_JOB_MANAGER_ID;
String FIND_ACCESSIBLE_GROUP_RESOURCE_PROFILES = "SELECT G FROM " + GroupResourceProfileEntity.class.getSimpleName() + " G " +
"WHERE G.gatewayId LIKE :" + DBConstants.GroupResourceProfile.GATEWAY_ID + " AND G.groupResourceProfileId IN :"
+ DBConstants.GroupResourceProfile.ACCESSIBLE_GROUP_RESOURCE_IDS;
String FIND_ALL_GROUP_COMPUTE_PREFERENCES = "SELECT GC FROM " + GroupComputeResourcePrefEntity.class.getSimpleName() + " GC " +
"WHERE GC.groupResourceProfileId LIKE :" + DBConstants.GroupResourceProfile.GROUP_RESOURCE_PROFILE_ID;
String FIND_ALL_GROUP_BATCH_QUEUE_RESOURCE_POLICY = "SELECT BQ FROM " + BatchQueueResourcePolicyEntity.class.getSimpleName() + " BQ " +
"WHERE BQ.groupResourceProfileId LIKE :" + DBConstants.GroupResourceProfile.GROUP_RESOURCE_PROFILE_ID;
String FIND_ALL_GROUP_COMPUTE_RESOURCE_POLICY = "SELECT CR FROM " + ComputeResourcePolicyEntity.class.getSimpleName() + " CR " +
"WHERE CR.groupResourceProfileId LIKE :" + DBConstants.GroupResourceProfile.GROUP_RESOURCE_PROFILE_ID;
String GET_ALL_USER_RESOURCE_PROFILE = "SELECT URP FROM " + UserResourceProfileEntity.class.getSimpleName() + " URP";
String GET_ALL_GATEWAY_ID = "SELECT DISTINCT URP FROM " + UserResourceProfileEntity.class.getSimpleName() + " URP " +
"WHERE URP.gatewayId LIKE :" + DBConstants.UserResourceProfile.GATEWAY_ID;
String GET_ALL_GATEWAYS = "SELECT G FROM " + GatewayEntity.class.getSimpleName() + " G";
String GET_GATEWAY_FROM_GATEWAY_NAME = "SELECT G FROM " + GatewayEntity.class.getSimpleName() + " G " +
"WHERE G.gatewayName LIKE :" + DBConstants.Gateway.GATEWAY_NAME;
String GET_ALL_GATEWAY_NOTIFICATIONS = "SELECT N FROM " + NotificationEntity.class.getSimpleName() + " N " +
"WHERE N.gatewayId LIKE :" + DBConstants.Notification.GATEWAY_ID;
String GET_ALL_GATEWAY_USERS = "SELECT U FROM " + UserEntity.class.getSimpleName() + " U " +
"WHERE U.gatewayId LIKE :" + DBConstants.User.GATEWAY_ID;
String GET_ALL_PROJECTS_FOR_OWNER = "SELECT P FROM " + ProjectEntity.class.getSimpleName() + " P " +
"WHERE P.owner LIKE :" + DBConstants.Project.OWNER;
String GET_EXPERIMENTS_FOR_USER = "SELECT E FROM " + ExperimentEntity.class.getSimpleName() + " E " +
"WHERE E.userName LIKE :" + DBConstants.Experiment.USER_NAME +
" AND E.gatewayId = :" + DBConstants.Experiment.GATEWAY_ID;
String GET_EXPERIMENTS_FOR_PROJECT_ID = "SELECT E FROM " + ExperimentEntity.class.getSimpleName() + " E " +
"WHERE E.projectId LIKE :" + DBConstants.Experiment.PROJECT_ID +
" AND E.gatewayId = :" + DBConstants.Experiment.GATEWAY_ID;
String GET_EXPERIMENTS_FOR_GATEWAY_ID = "SELECT E FROM " + ExperimentEntity.class.getSimpleName() + " E " +
"WHERE E.gatewayId LIKE :" + DBConstants.Experiment.GATEWAY_ID;
String GET_PROCESS_FOR_EXPERIMENT_ID = "SELECT P FROM " + ProcessEntity.class.getSimpleName() + " P " +
"WHERE P.experimentId = :" + DBConstants.Process.EXPERIMENT_ID;
String GET_TASK_FOR_PARENT_PROCESS_ID = "SELECT T FROM " + TaskEntity.class.getSimpleName() + " T " +
"WHERE T.parentProcessId LIKE :" + DBConstants.Task.PARENT_PROCESS_ID;
String GET_JOB_FOR_PROCESS_ID = "SELECT J FROM " + JobEntity.class.getSimpleName() + " J " +
"WHERE J.processId LIKE :" + DBConstants.Job.PROCESS_ID;
String GET_JOB_FOR_TASK_ID = "SELECT J FROM " + JobEntity.class.getSimpleName() + " J " +
"WHERE J.taskId LIKE :" + DBConstants.Job.TASK_ID;
String GET_JOB_FOR_JOB_ID = "SELECT J FROM " + JobEntity.class.getSimpleName() + " J " +
"WHERE J.jobId LIKE :" + DBConstants.Job.JOB_ID;
String GET_ALL_QUEUE_STATUS_MODELS = "SELECT QSM FROM " + QueueStatusEntity.class.getSimpleName() + " QSM";
String GET_ALL_USER_COMPUTE_RESOURCE_PREFERENCE = "SELECT UCRP FROM " + UserComputeResourcePreferenceEntity.class.getSimpleName() + " UCRP " +
"WHERE UCRP.userId LIKE :" + DBConstants.UserComputeResourcePreference.USER_ID + " AND UCRP.gatewayId LIKE :" +
DBConstants.UserComputeResourcePreference.GATEWAY_ID;
String GET_ALL_USER_STORAGE_PREFERENCE = "SELECT USP FROM " + UserStoragePreferenceEntity.class.getSimpleName() + " USP " +
"WHERE USP.userId LIKE :" + DBConstants.UserStoragePreference.USER_ID + " AND USP.gatewayId LIKE :" +
DBConstants.UserStoragePreference.GATEWAY_ID;
String FIND_ALL_CHILD_DATA_PRODUCTS = "SELECT DP FROM " + DataProductEntity.class.getSimpleName() + " DP " +
"WHERE DP.parentProductUri LIKE :" + DBConstants.DataProduct.PARENT_PRODUCT_URI;
String FIND_DATA_PRODUCT_BY_NAME = "SELECT DP FROM " + DataProductEntity.class.getSimpleName() + " DP " +
"WHERE DP.gatewayId LIKE :" + DBConstants.DataProduct.GATEWAY_ID + " AND DP.ownerName LIKE :" +
DBConstants.DataProduct.OWNER_NAME + " AND dp.productName LIKE :" + DBConstants.DataProduct.PRODUCT_NAME;
String GET_WORKFLOW_FOR_EXPERIMENT_ID = "SELECT W FROM " + AiravataWorkflowEntity.class.getSimpleName() + " W " +
"WHERE W.experimentId LIKE :" + DBConstants.Workflow.EXPERIMENT_ID;
String FIND_STORAGE_RESOURCE = "SELECT DISTINCT SR FROM " + StorageResourceEntity.class.getSimpleName() + " SR " +
"WHERE SR.hostName LIKE :" + DBConstants.StorageResource.HOST_NAME;
String FIND_ALL_STORAGE_RESOURCES = "SELECT SR FROM " + StorageResourceEntity.class.getSimpleName() + " SR";
String FIND_ALL_AVAILABLE_STORAGE_RESOURCES = "SELECT SR FROM " + StorageResourceEntity.class.getSimpleName() + " SR " +
"WHERE SR.enabled = TRUE";
String FIND_ALL_GRID_FTP_ENDPOINTS_BY_DATA_MOVEMENT = "SELECT GFE FROM " + GridftpEndpointEntity.class.getSimpleName() +
" GFE WHERE GFE.gridftpDataMovement.dataMovementInterfaceId LIKE :" + DBConstants.DataMovement.GRID_FTP_DATA_MOVEMENT_ID;
String FIND_PARSING_TEMPLATES_FOR_APPLICATION_INTERFACE_ID = "SELECT PT FROM " + ParsingTemplateEntity.class.getSimpleName() + " PT " +
"WHERE PT.applicationInterface = :" + DBConstants.ParsingTemplate.APPLICATION_INTERFACE_ID;
String FIND_ALL_PARSING_TEMPLATES_FOR_GATEWAY_ID = "SELECT PT FROM " + ParsingTemplateEntity.class.getSimpleName() + " PT " +
"WHERE PT.gatewayId = :" + DBConstants.ParsingTemplate.GATEWAY_ID;
String FIND_ALL_PARSERS_FOR_GATEWAY_ID = "SELECT P FROM " + ParserEntity.class.getSimpleName() + " P " +
"WHERE P.gatewayId = :" + DBConstants.Parser.GATEWAY_ID;
String FIND_QUEUE_STATUS = "SELECT L FROM " + QueueStatusEntity.class.getSimpleName() + " L WHERE L.hostName LIKE :"
+ DBConstants.QueueStatus.HOST_NAME + " AND L.queueName LIKE :" + DBConstants.QueueStatus.QUEUE_NAME +" ORDER BY L.time DESC";
String FIND_PROCESS_WITH_STATUS = "SELECT P FROM " + ProcessStatusEntity.class.getSimpleName() + " P " +
" where P.state = :" + DBConstants.ProcessStatus.STATE;
String GET_ALL_PROCESSES = "SELECT P FROM " + ProcessEntity.class.getSimpleName() +" P ";
String DELETE_JOB_NATIVE_QUERY = "DELETE FROM JOB WHERE JOB_ID = ?1 AND TASK_ID = ?2";
String FIND_JOB_COUNT_NATIVE_QUERY = "SELECT DISTINCT JS.JOB_ID FROM JOB_STATUS JS WHERE JS.JOB_ID IN " +
"(SELECT J.JOB_ID FROM JOB J where J.PROCESS_ID IN " +
"(SELECT P.PROCESS_ID FROM PROCESS P where P.EXPERIMENT_ID IN " +
"(SELECT E.EXPERIMENT_ID FROM EXPERIMENT E where E.GATEWAY_ID= ?1))) " +
"AND JS.STATE = ?2 and JS.TIME_OF_STATE_CHANGE > now() - interval ?3 minute";
String FIND_AVG_TIME_UPTO_METASCHEDULER_NATIVE_QUERY = "SELECT AVG(difference) FROM(select es.TIME_OF_STATE_CHANGE AS esTime1, ps.TIME_OF_STATE_CHANGE as psTime1, " +
" TIMESTAMPDIFF(MICROSECOND, es.TIME_OF_STATE_CHANGE, ps.TIME_OF_STATE_CHANGE) AS difference FROM EXPERIMENT_STATUS es, " +
" EXPERIMENT_STATUS ps WHERE es.EXPERIMENT_ID IN (select EXPERIMENT_ID FROM EXPERIMENT WHERE GATEWAY_ID= ?1) " +
" AND ps.EXPERIMENT_ID=es.EXPERIMENT_ID AND es.STATE='CREATED' AND (ps.STATE='SCHEDULED' OR (ps.STATE='LAUNCHED ' " +
" AND ps.EXPERIMENT_ID NOT IN(select ps1.EXPERIMENT_ID FROM EXPERIMENT_STATUS ps1 WHERE ps1.STATE='SCHEDULED'))" +
" AND ps.TIME_OF_STATE_CHANGE <= ALL(select ps1.TIME_OF_STATE_CHANGE FROM EXPERIMENT_STATUS ps1 WHERE " +
" ps1.EXPERIMENT_ID=ps.EXPERIMENT_ID AND ps1.STATE='SCHEDULED')) " +
" AND es.TIME_OF_STATE_CHANGE > now()-interval ?2 minute) abstract_t";
String FIND_AVG_TIME_QUEUED_NATIVE_QUERY = "SELECT AVG(difference) FROM (SELECT es.TIME_OF_STATE_CHANGE AS esTime1, ps.TIME_OF_STATE_CHANGE as psTime1, " +
" TIMESTAMPDIFF(MICROSECOND, es.TIME_OF_STATE_CHANGE, ps.TIME_OF_STATE_CHANGE) AS difference FROM EXPERIMENT_STATUS es," +
" EXPERIMENT_STATUS ps WHERE es.EXPERIMENT_ID IN (select EXPERIMENT_ID FROM EXPERIMENT WHERE GATEWAY_ID=?1) " +
" AND ps.EXPERIMENT_ID=es.EXPERIMENT_ID AND es.STATE='SCHEDULED' AND ps.STATE='LAUNCHED' " +
" AND ps.TIME_OF_STATE_CHANGE >= ALL(SELECT ps1.TIME_OF_STATE_CHANGE FROM EXPERIMENT_STATUS ps1 " +
" WHERE ps1.EXPERIMENT_ID=ps.EXPERIMENT_ID AND ps1.STATE='LAUNCHED') AND " +
" es.TIME_OF_STATE_CHANGE <= ALL(SELECT ps1.TIME_OF_STATE_CHANGE FROM EXPERIMENT_STATUS ps1 " +
" WHERE ps1.EXPERIMENT_ID=es.EXPERIMENT_ID AND ps1.STATE='SCHEDULED') AND es.TIME_OF_STATE_CHANGE > now()-interval ?2 minute)abstract_t";
String FIND_AVG_TIME_HELIX_NATIVE_QUERY = "SELECT AVG(difference) FROM(SELECT es.TIME_OF_STATE_CHANGE AS esTime1, ps.TIME_OF_STATE_CHANGE as psTime1, " +
" TIMESTAMPDIFF(MICROSECOND, es.TIME_OF_STATE_CHANGE, ps.TIME_OF_STATE_CHANGE) AS difference from EXPERIMENT_STATUS es, " +
" JOB_STATUS ps where es.EXPERIMENT_ID IN (SELECT EXPERIMENT_ID FROM EXPERIMENT WHERE GATEWAY_ID=?1) " +
" AND ps.JOB_ID IN(SELECT j.JOB_ID FROM JOB j where j.PROCESS_ID IN(SELECT DISTINCT p.PROCESS_ID FROM PROCESS p " +
" WHERE p.EXPERIMENT_ID=es.EXPERIMENT_ID)) AND ps.TASK_ID IN(SELECT j.TASK_ID FROM JOB j where j.PROCESS_ID IN(SELECT DISTINCT p.PROCESS_ID FROM PROCESS p " +
" WHERE p.EXPERIMENT_ID=es.EXPERIMENT_ID)) AND es.STATE='LAUNCHED' AND ps.STATE='SUBMITTED' " +
" AND ps.TIME_OF_STATE_CHANGE >= ALL(SELECT ps1.TIME_OF_STATE_CHANGE FROM JOB_STATUS ps1 WHERE ps1.JOB_ID=ps.JOB_ID " +
" AND ps1.STATE='SUBMITTED') AND es.TIME_OF_STATE_CHANGE >= ALL(SELECT es1.TIME_OF_STATE_CHANGE FROM EXPERIMENT_STATUS es1 " +
" WHERE es1.EXPERIMENT_ID=es.EXPERIMENT_ID AND es1.STATE='LAUNCHED') AND es.TIME_OF_STATE_CHANGE > now()-interval ?2 minute) abstract_t";
}
| 828 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/ReplicaCatalogDBInitConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import org.apache.airavata.common.utils.DBInitConfig;
import org.apache.airavata.common.utils.JDBCConfig;
/**
 * {@link DBInitConfig} implementation that bootstraps the replica catalog
 * database from the bundled SQL scripts.
 */
public class ReplicaCatalogDBInitConfig implements DBInitConfig {

    /** Table whose presence indicates the schema was already created. */
    public static final String CHECK_TABLE = "CONFIGURATION";

    // Classpath prefix under which the init scripts live.
    private String dbInitScriptPrefix = "database_scripts/replicacatalog";

    @Override
    public JDBCConfig getJDBCConfig() {
        return new ReplicaCatalogJDBCConfig();
    }

    @Override
    public String getDBInitScriptPrefix() {
        return this.dbInitScriptPrefix;
    }

    /**
     * Overrides the default script prefix.
     *
     * @param dbInitScriptPrefix classpath prefix of the init scripts
     * @return this instance, allowing chained configuration
     */
    public ReplicaCatalogDBInitConfig setDbInitScriptPrefix(String dbInitScriptPrefix) {
        this.dbInitScriptPrefix = dbInitScriptPrefix;
        return this;
    }

    @Override
    public String getCheckTableName() {
        return CHECK_TABLE;
    }
}
| 829 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/WorkflowCatalogJDBCConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.ServerSettings;
/**
 * {@link JDBCConfig} exposing the workflow-catalog JDBC settings declared in
 * airavata-server.properties.
 */
public class WorkflowCatalogJDBCConfig implements JDBCConfig {

    private static final String WORKFLOW_CATALOG_JDBC_DRIVER = "workflowcatalog.jdbc.driver";
    private static final String WORKFLOW_CATALOG_JDBC_URL = "workflowcatalog.jdbc.url";
    private static final String WORKFLOW_CATALOG_JDBC_USER = "workflowcatalog.jdbc.user";
    private static final String WORKFLOW_CATALOG_JDBC_PASSWORD = "workflowcatalog.jdbc.password";
    private static final String WORKFLOW_CATALOG_VALIDATION_QUERY = "workflowcatalog.validationQuery";

    @Override
    public String getURL() {
        return lookup(WORKFLOW_CATALOG_JDBC_URL);
    }

    @Override
    public String getDriver() {
        return lookup(WORKFLOW_CATALOG_JDBC_DRIVER);
    }

    @Override
    public String getUser() {
        return lookup(WORKFLOW_CATALOG_JDBC_USER);
    }

    @Override
    public String getPassword() {
        return lookup(WORKFLOW_CATALOG_JDBC_PASSWORD);
    }

    @Override
    public String getValidationQuery() {
        return lookup(WORKFLOW_CATALOG_VALIDATION_QUERY);
    }

    // Resolves a single server property. A missing setting is unrecoverable
    // here, so the checked settings exception is rethrown unchecked.
    private String lookup(String propertyName) {
        try {
            return ServerSettings.getSetting(propertyName);
        } catch (ApplicationSettingsException e) {
            throw new RuntimeException("Unable to read airavata-server.properties...", e);
        }
    }
}
| 830 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/Committer.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
/**
 * Generic callback that runs a unit of work against an input and hands back
 * its result — used by the registry repositories to execute an operation
 * (presumably inside a transaction; confirm against callers).
 *
 * @param <T> type of the input passed to the operation
 * @param <R> type of the result the operation produces
 */
@FunctionalInterface
public interface Committer<T, R> {
// Executes the operation for the given input and returns its result.
R commit(T t);
} | 831
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/ExpCatalogDBInitConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import org.apache.airavata.common.utils.DBInitConfig;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.model.user.UserProfile;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.model.workspace.GatewayApprovalStatus;
import org.apache.airavata.registry.core.repositories.expcatalog.GatewayRepository;
import org.apache.airavata.registry.core.repositories.expcatalog.UserRepository;
/**
 * {@link DBInitConfig} implementation that bootstraps the experiment catalog
 * database and, after schema creation, seeds the default gateway and the
 * default user.
 */
public class ExpCatalogDBInitConfig implements DBInitConfig {

    /**
     * Table whose presence indicates the schema was already created. Declared
     * as a named constant for consistency with {@code ReplicaCatalogDBInitConfig}.
     */
    public static final String CHECK_TABLE = "CONFIGURATION";

    // Classpath prefix under which the init scripts live.
    private String dbInitScriptPrefix = "database_scripts/expcatalog";

    @Override
    public JDBCConfig getJDBCConfig() {
        return new ExpCatalogJDBCConfig();
    }

    @Override
    public String getDBInitScriptPrefix() {
        return this.dbInitScriptPrefix;
    }

    /**
     * Overrides the default script prefix.
     *
     * @param dbInitScriptPrefix classpath prefix of the init scripts
     * @return this instance, allowing chained configuration
     */
    public ExpCatalogDBInitConfig setDbInitScriptPrefix(String dbInitScriptPrefix) {
        this.dbInitScriptPrefix = dbInitScriptPrefix;
        return this;
    }

    @Override
    public String getCheckTableName() {
        return CHECK_TABLE;
    }

    /**
     * Creates the default gateway and default user if they do not exist yet.
     *
     * @throws RuntimeException wrapping any failure during seeding
     */
    @Override
    public void postInit() {
        try {
            // Create default gateway and default user if not already created
            GatewayRepository gatewayRepository = new GatewayRepository();
            String defaultGatewayId = ServerSettings.getDefaultUserGateway();
            if (!gatewayRepository.isGatewayExist(defaultGatewayId)) {
                Gateway gateway = new Gateway();
                gateway.setGatewayId(defaultGatewayId);
                gateway.setGatewayApprovalStatus(GatewayApprovalStatus.APPROVED);
                gateway.setOauthClientId(ServerSettings.getSetting("default.registry.oauth.client.id"));
                gateway.setOauthClientSecret(ServerSettings.getSetting("default.registry.oauth.client.secret"));
                gatewayRepository.addGateway(gateway);
            }
            UserRepository userRepository = new UserRepository();
            String defaultUsername = ServerSettings.getDefaultUser();
            if (!userRepository.isUserExists(defaultGatewayId, defaultUsername)) {
                UserProfile defaultUser = new UserProfile();
                defaultUser.setUserId(defaultUsername);
                defaultUser.setGatewayId(defaultGatewayId);
                userRepository.addUser(defaultUser);
            }
        } catch (Exception e) {
            throw new RuntimeException("Failed to post-initialize the expcatalog database", e);
        }
    }
}
| 832 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/ExpCatalogUtils.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.utils;
import java.util.UUID;
/**
 * Static helper routines for the experiment catalog.
 */
public class ExpCatalogUtils {

    // Utility class: prevent instantiation.
    private ExpCatalogUtils() {
    }

    /**
     * Derives a unique identifier from a human-readable name: all whitespace
     * is stripped and a random UUID is appended.
     *
     * @param name base name; must not be {@code null}
     * @return {@code <nameWithoutWhitespace>_<randomUUID>}
     */
    public static String getID(String name) {
        String compactName = name.replaceAll("\\s", "");
        return compactName + "_" + UUID.randomUUID();
    }
}
| 833 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/CustomBeanFactory.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.thrift.TBase;
import org.apache.thrift.TFieldIdEnum;
import org.apache.thrift.TFieldRequirementType;
import org.apache.thrift.meta_data.FieldMetaData;
import org.dozer.BeanFactory;
import org.dozer.util.MappingUtils;
import org.dozer.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Dozer {@link BeanFactory} that instantiates mapping targets and, for Thrift
 * {@link TBase} objects, forces setters to be called on every field that has a
 * default value so those fields are marked "set" (see
 * {@link #callSettersOnThriftFieldsWithDefaults}).
 */
public class CustomBeanFactory implements BeanFactory {
private final static Logger logger = LoggerFactory.getLogger(CustomBeanFactory.class);
/**
 * Creates an instance of the destination class named by {@code targetBeanId}.
 * If the new instance is a Thrift object, its defaulted fields are re-set so
 * they serialize (see below).
 */
@Override
public Object createBean(Object source, Class<?> sourceClass, String targetBeanId) {
Object result;
Class<?> destClass = MappingUtils.loadClass(targetBeanId);
if (logger.isDebugEnabled()) {
logger.debug("Creating bean of type " + destClass.getSimpleName());
}
result = ReflectionUtils.newInstance(destClass);
// Raw TBase cast: the generic parameters are unknown at this point.
if (result instanceof TBase) {
callSettersOnThriftFieldsWithDefaults((TBase) result);
}
return result;
}
/**
 * Thrift fields with default values aren't serialized and sent over the wire if
 * their setters were never called. However, Dozer doesn't call the setter on
 * the field of a target object when the target field's value already matches
 * the source's field value. This results in the Thrift data model object field
 * having the default value but it doesn't get serialized with that value (and
 * for required fields validation fails). The following changes the semantics of
 * defaulted Thrift fields a bit so that they are always "set" even if the
 * source object had no such field, but this matches the more general semantics
 * of what is expected from fields that have default values and it works around
 * an annoyance with required default fields that would fail validation
 * otherwise.
 *
 * <p>
 * See AIRAVATA-3268 and AIRAVATA-3328 for more information.
 *
 * @param <T> the concrete Thrift struct type
 * @param <F> the struct's field-id enum type
 * @param instance freshly created Thrift object whose defaulted fields are re-set
 */
private <T extends TBase<T, F>, F extends TFieldIdEnum> void callSettersOnThriftFieldsWithDefaults(
TBase<T, F> instance) {
try {
// Thrift-generated classes expose a public static "metaDataMap" field
// describing every field of the struct; read it reflectively.
Field metaDataMapField = instance.getClass().getField("metaDataMap");
Map<F, FieldMetaData> metaDataMap = (Map<F, FieldMetaData>) metaDataMapField.get(null);
for (Entry<F, FieldMetaData> metaDataEntry : metaDataMap.entrySet()) {
if (logger.isDebugEnabled()) {
logger.debug("processing field " + metaDataEntry.getValue().fieldName);
}
Object fieldValue = instance.getFieldValue(metaDataEntry.getKey());
// Non-null on a fresh instance means the field carries a default value;
// re-assign it through the setter so Thrift marks the field as set.
if (fieldValue != null) {
if (logger.isDebugEnabled()) {
logger.debug("field " + metaDataEntry.getValue().fieldName + " has a default value ["
+ fieldValue + "], calling setter to force the field to be set");
}
instance.setFieldValue(metaDataEntry.getKey(), fieldValue);
}
}
} catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) {
// Reflection failure is surfaced through Dozer's standard mapping exception.
MappingUtils.throwMappingException(e);
}
}
}
| 834 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/ExpCatalogJDBCConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.ServerSettings;
/**
 * {@link JDBCConfig} exposing the experiment-catalog ("registry") JDBC
 * settings from airavata-server.properties.
 */
public class ExpCatalogJDBCConfig implements JDBCConfig {

    private static final String EXPCATALOG_JDBC_DRIVER = "registry.jdbc.driver";
    private static final String EXPCATALOG_JDBC_URL = "registry.jdbc.url";
    private static final String EXPCATALOG_JDBC_USER = "registry.jdbc.user";
    private static final String EXPCATALOG_JDBC_PWD = "registry.jdbc.password";
    private static final String EXPCATALOG_VALIDATION_QUERY = "validationQuery";

    @Override
    public String getURL() {
        return readSetting(EXPCATALOG_JDBC_URL);
    }

    @Override
    public String getDriver() {
        return readSetting(EXPCATALOG_JDBC_DRIVER);
    }

    @Override
    public String getUser() {
        return readSetting(EXPCATALOG_JDBC_USER);
    }

    @Override
    public String getPassword() {
        return readSetting(EXPCATALOG_JDBC_PWD);
    }

    @Override
    public String getValidationQuery() {
        return readSetting(EXPCATALOG_VALIDATION_QUERY);
    }

    // Resolves one property; a missing setting is unrecoverable here, so the
    // checked settings exception is rethrown unchecked.
    private String readSetting(String propertyName) {
        try {
            return ServerSettings.getSetting(propertyName);
        } catch (ApplicationSettingsException e) {
            throw new RuntimeException("Unable to read airavata-server.properties...", e);
        }
    }
}
| 835 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/DBConstants.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.utils;
/**
 * Names of JPA entity attributes and query parameters shared by the registry
 * repositories, grouped into nested holder classes per catalog entity.
 */
public class DBConstants {

    // Default cap on the number of rows returned by search queries.
    // Intentionally left non-final so deployments may tune it at runtime.
    public static int SELECT_MAX_ROWS = 1000;

    public static final String CONFIGURATION = "Configuration";
    public static final String WORKFLOW = "Workflow";

    public static class ApplicationDeployment {
        public static final String APPLICATION_MODULE_ID = "appModuleId";
        public static final String COMPUTE_HOST_ID = "computeHostId";
        public static final String GATEWAY_ID = "gatewayId";
        public static final String ACCESSIBLE_APPLICATION_DEPLOYMENT_IDS = "accessibleAppDeploymentIds";
        public static final String ACCESSIBLE_COMPUTE_HOST_IDS = "accessibleComputeHostIds";
    }

    public static class ApplicationModule {
        public static final String APPLICATION_MODULE_NAME = "appModuleName";
        public static final String GATEWAY_ID = "gatewayId";
    }

    public static class ApplicationInterface {
        public static final String APPLICATION_NAME = "applicationName";
        public static final String GATEWAY_ID = "gatewayId";
    }

    public static class ApplicationInput {
        public static final String APPLICATION_INTERFACE_ID = "interfaceId";
    }

    public static class ApplicationOutput {
        public static final String APPLICATION_INTERFACE_ID = "interfaceId";
    }

    public static class ComputeResourcePreference {
        public static final String GATEWAY_ID = "gatewayId";
    }

    public static class StorageResourcePreference {
        public static final String GATEWAY_ID = "gatewayId";
    }

    public static class ComputeResource {
        public static final String HOST_NAME = "hostName";
        public static final String COMPUTE_RESOURCE_ID = "computeResourceId";
    }

    public static class StorageResource {
        public static final String HOST_NAME = "hostName";
    }

    public static class ResourceJobManager {
        public static final String RESOURCE_JOB_MANAGER_ID = "resourceJobManagerId";
    }

    public static class GroupResourceProfile {
        public static final String GATEWAY_ID = "gatewayId";
        public static final String GROUP_RESOURCE_PROFILE_ID = "groupResourceProfileId";
        public static final String ACCESSIBLE_GROUP_RESOURCE_IDS = "accessibleGroupResProfileIds";
    }

    public static class UserResourceProfile {
        public static final String USER_ID = "userId";
        public static final String GATEWAY_ID = "gatewayId";
    }

    public static class Gateway {
        public static final String GATEWAY_NAME = "gatewayName";
    }

    public static class User {
        public static final String USER_NAME = "userId";
        public static final String GATEWAY_ID = "gatewayId";
    }

    public static class Notification {
        public static final String GATEWAY_ID = "gatewayId";
    }

    public static class Project {
        public static final String GATEWAY_ID = "gatewayId";
        public static final String OWNER = "owner";
        public static final String PROJECT_NAME = "name";
        public static final String DESCRIPTION = "description";
        public static final String ACCESSIBLE_PROJECT_IDS = "accessibleProjectIds";
        public static final String CREATION_TIME = "creationTime";
    }

    public static class Experiment {
        public static final String USER_NAME = "userName";
        public static final String PROJECT_ID = "projectId";
        public static final String GATEWAY_ID = "gatewayId";
        public static final String EXPERIMENT_ID = "experimentId";
        public static final String EXPERIMENT_NAME = "experimentName";
        public static final String DESCRIPTION = "description";
        public static final String EXECUTION_ID = "executionId";
        public static final String CREATION_TIME = "creationTime";
        public static final String RESOURCE_HOST_ID = "resourceHostId";
        public static final String ACCESSIBLE_EXPERIMENT_IDS = "accessibleExperimentIds";
    }

    // Declared static for consistency with the sibling holder classes
    // (previously a non-static inner class; it holds only constants, so the
    // change is backward compatible for all static accesses).
    public static final class ExperimentStatus {
        public static final String EXPERIMENT_ID = "experimentId";
        public static final String STATE = "state";
        public static final String REASON = "reason";
    }

    public static class Process {
        public static final String EXPERIMENT_ID = "experimentId";
    }

    public static class Task {
        public static final String PARENT_PROCESS_ID = "parentProcessId";
    }

    public static class Job {
        public static final String PROCESS_ID = "processId";
        public static final String TASK_ID = "taskId";
        public static final String JOB_ID = "jobId";
        public static final String GATEWAY_ID = "gatewayId";
        public static final String TIME_INTERVAL = "timeInterval";
        public static final String JOB_STATUS = "jobStatus";
    }

    public static class ExperimentSummary {
        public static final String EXPERIMENT_STATUS = "experimentStatus";
        public static final String FROM_DATE = "fromDate";
        public static final String TO_DATE = "toDate";
    }

    public static class UserComputeResourcePreference {
        public static final String USER_ID = "userId";
        public static final String GATEWAY_ID = "gatewayId";
        public static final String COMPUTE_RESOURCE_ID = "computeResourceId";
    }

    public static class UserStoragePreference {
        public static final String USER_ID = "userId";
        public static final String GATEWAY_ID = "gatewayId";
        public static final String STORAGE_RESOURCE_ID = "storageResourceId";
    }

    public static class DataProduct {
        public static final String GATEWAY_ID = "gatewayId";
        public static final String OWNER_NAME = "ownerName";
        public static final String PRODUCT_NAME = "productName";
        public static final String PARENT_PRODUCT_URI = "parentProductUri";
    }

    public static class Workflow {
        public static final String EXPERIMENT_ID = "experimentId";
    }

    public static class DataMovement {
        public static final String GRID_FTP_DATA_MOVEMENT_ID = "dataMovementId";
    }

    public static class ParsingTemplate {
        public static final String GATEWAY_ID = "gatewayId";
        public static final String APPLICATION_INTERFACE_ID = "applicationInterfaceId";
    }

    public static class Parser {
        public static final String GATEWAY_ID = "gatewayId";
    }

    public static class QueueStatus {
        public static final String HOST_NAME = "hostName";
        public static final String QUEUE_NAME = "queueName";
    }

    public static class ProcessStatus {
        public static final String STATE = "state";
        public static final String TIME_OF_STATE_CHANGE = "timeOfStateChange";
        public static final String PROCESS_ID = "processId";
    }

    public static class MetaData {
        public static final String ORCH_TIME = "orchTime";
        public static final String QUEUED_TIME = "queuedTime";
        public static final String HELIX = "helix";
    }
}
| 836 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/WorkflowCatalogDBInitConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import org.apache.airavata.common.utils.DBInitConfig;
import org.apache.airavata.common.utils.JDBCConfig;
/**
 * {@link DBInitConfig} implementation that bootstraps the Airavata workflow
 * catalog database from the bundled SQL scripts.
 */
public class WorkflowCatalogDBInitConfig implements DBInitConfig {

    /**
     * Table whose presence indicates the schema was already created. Declared
     * as a named constant for consistency with {@code ReplicaCatalogDBInitConfig}.
     */
    public static final String CHECK_TABLE = "CONFIGURATION";

    // Classpath prefix under which the init scripts live.
    private String dbInitScriptPrefix = "database_scripts/airavataworkflowcatalog";

    @Override
    public JDBCConfig getJDBCConfig() {
        return new WorkflowCatalogJDBCConfig();
    }

    @Override
    public String getDBInitScriptPrefix() {
        return this.dbInitScriptPrefix;
    }

    /**
     * Overrides the default script prefix.
     *
     * @param dbInitScriptPrefix classpath prefix of the init scripts
     * @return this instance, allowing chained configuration
     */
    public WorkflowCatalogDBInitConfig setDbInitScriptPrefix(String dbInitScriptPrefix) {
        this.dbInitScriptPrefix = dbInitScriptPrefix;
        return this;
    }

    @Override
    public String getCheckTableName() {
        return CHECK_TABLE;
    }
}
| 837 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/AppCatalogDBInitConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import org.apache.airavata.common.utils.DBInitConfig;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
import org.apache.airavata.registry.core.repositories.appcatalog.GwyResourceProfileRepository;
import org.apache.airavata.registry.core.repositories.appcatalog.UserResourceProfileRepository;
/**
 * {@link DBInitConfig} implementation for the application catalog database.
 * After schema creation, ensures a gateway resource profile exists for the
 * default gateway.
 */
public class AppCatalogDBInitConfig implements DBInitConfig {

    // Classpath prefix under which the init scripts live.
    private String dbInitScriptPrefix = "database_scripts/appcatalog";

    @Override
    public JDBCConfig getJDBCConfig() {
        return new AppCatalogJDBCConfig();
    }

    @Override
    public String getDBInitScriptPrefix() {
        return this.dbInitScriptPrefix;
    }

    /**
     * Overrides the default script prefix.
     *
     * @param dbInitScriptPrefix classpath prefix of the init scripts
     * @return this instance, allowing chained configuration
     */
    public AppCatalogDBInitConfig setDbInitScriptPrefix(String dbInitScriptPrefix) {
        this.dbInitScriptPrefix = dbInitScriptPrefix;
        return this;
    }

    @Override
    public String getCheckTableName() {
        return "GATEWAY_PROFILE";
    }

    /**
     * Seeds a gateway resource profile for the default gateway if one is not
     * already present.
     */
    @Override
    public void postInit() {
        GwyResourceProfileRepository profileRepository = new GwyResourceProfileRepository();
        try {
            GatewayResourceProfile defaultProfile = new GatewayResourceProfile();
            defaultProfile.setGatewayID(ServerSettings.getDefaultUserGateway());
            if (!profileRepository.isGatewayResourceProfileExists(defaultProfile.getGatewayID())) {
                profileRepository.addGatewayResourceProfile(defaultProfile);
            }
        } catch (Exception e) {
            throw new RuntimeException("Failed to create default gateway for app catalog", e);
        }
    }
}
| 838 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/ObjectMapperSingleton.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import org.apache.commons.lang3.ClassUtils;
import org.apache.thrift.TBase;
import org.apache.thrift.TFieldIdEnum;
import org.dozer.CustomFieldMapper;
import org.dozer.DozerBeanMapper;
import org.dozer.classmap.ClassMap;
import org.dozer.fieldmap.FieldMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
public class ObjectMapperSingleton extends DozerBeanMapper {

    private final static Logger logger = LoggerFactory.getLogger(ObjectMapperSingleton.class);

    // Lazily created shared mapper. Access is guarded by synchronizing
    // getInstance() so two concurrent first callers cannot create two mappers
    // or observe a partially configured instance.
    private static ObjectMapperSingleton instance;

    private ObjectMapperSingleton() {}

    /**
     * Returns the shared Dozer mapper, configured with dozer_mapping.xml and a
     * custom field mapper that skips unset primitive Thrift fields.
     *
     * <p>Synchronized to make the lazy initialization thread-safe; the instance
     * is fully configured before being published to the static field.
     */
    public static synchronized ObjectMapperSingleton getInstance() {
        if (instance == null) {
            ObjectMapperSingleton mapper = new ObjectMapperSingleton();
            ArrayList<String> mappingFiles = new ArrayList<>();
            mappingFiles.add("dozer_mapping.xml");
            mapper.setMappingFiles(mappingFiles);
            mapper.setCustomFieldMapper(new SkipUnsetPrimitiveFieldMapper());
            instance = mapper;
        }
        return instance;
    }

    /**
     * Field mapper that prevents Dozer from copying primitive fields that are
     * not "set" on the Thrift source model (Thrift primitives default to 0/false,
     * which would otherwise overwrite real values on the destination).
     */
    private static class SkipUnsetPrimitiveFieldMapper implements CustomFieldMapper {
        @Override
        public boolean mapField(Object source, Object destination, Object sourceFieldValue, ClassMap classMap, FieldMap fieldMap) {
            // Returning true tells Dozer the field has been "handled" (i.e. skip it).
            if (isSourceUnsetThriftField(source, fieldMap) && sourceFieldValue != null && ClassUtils.isPrimitiveOrWrapper(sourceFieldValue.getClass())) {
                logger.debug("Skipping field " + fieldMap.getSrcFieldName() + " since it is unset thrift field and is primitive");
                return true;
            }
            return false;
        }

        /**
         * Returns true when {@code source} is a Thrift model and the mapped
         * field is resolvable by name and reported as unset via isSet().
         */
        private boolean isSourceUnsetThriftField(Object source, FieldMap fieldMap) {
            if (source instanceof TBase) {
                TBase thriftSource = (TBase) source;
                try {
                    // Thrift generates a nested _Fields enum with a static findByName helper.
                    Class<?> thriftFieldsEnum = Class.forName(thriftSource.getClass().getName() + "$_Fields");
                    TFieldIdEnum srcField = (TFieldIdEnum) thriftFieldsEnum.getMethod(
                            "findByName", String.class).invoke(null, fieldMap.getSrcFieldName());
                    // FIXME: Dozer can handle case insensitive field matching, for example, "gatewayID" maps to
                    // "gatewayId" but this method of looking up field by name is case sensitive. For example,
                    // it fails to find "gatewayID" on GatewayResourceProfile.
                    if (srcField != null && !thriftSource.isSet(srcField)) {
                        return true;
                    }
                } catch (Exception e) {
                    throw new RuntimeException("Thrift model class has no _Fields enum", e);
                }
            }
            return false;
        }
    }
}
| 839 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/AppCatalogJDBCConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.ServerSettings;
/**
 * JDBC configuration for the app catalog database, backed by properties in
 * airavata-server.properties.
 */
public class AppCatalogJDBCConfig implements JDBCConfig {

    // Property keys in airavata-server.properties.
    private static final String APPCATALOG_JDBC_DRIVER = "appcatalog.jdbc.driver";
    private static final String APPCATALOG_JDBC_URL = "appcatalog.jdbc.url";
    private static final String APPCATALOG_JDBC_USER = "appcatalog.jdbc.user";
    private static final String APPCATALOG_JDBC_PWD = "appcatalog.jdbc.password";
    private static final String APPCATALOG_JDBC_VALIDATION_QUERY = "appcatalog.validationQuery";

    @Override
    public String getURL() {
        return getProperty(APPCATALOG_JDBC_URL);
    }

    @Override
    public String getDriver() {
        return getProperty(APPCATALOG_JDBC_DRIVER);
    }

    @Override
    public String getUser() {
        return getProperty(APPCATALOG_JDBC_USER);
    }

    @Override
    public String getPassword() {
        return getProperty(APPCATALOG_JDBC_PWD);
    }

    @Override
    public String getValidationQuery() {
        return getProperty(APPCATALOG_JDBC_VALIDATION_QUERY);
    }

    /** Reads a required server setting, wrapping config errors as unchecked. */
    private String getProperty(String key) {
        try {
            return ServerSettings.getSetting(key);
        } catch (ApplicationSettingsException e) {
            throw new RuntimeException("Unable to read airavata-server.properties...", e);
        }
    }
}
| 840 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/DozerConverter/StorageDateConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils.DozerConverter;
import org.dozer.DozerConverter;
import java.sql.Timestamp;
/**
* Created by skariyat on 4/11/18.
*/
public class StorageDateConverter extends DozerConverter {

    public StorageDateConverter(Class prototypeA, Class prototypeB) {
        super(prototypeA, prototypeB);
    }

    /**
     * Maps a {@code Long} (epoch milliseconds) to a {@link Timestamp} and a
     * {@link Timestamp} back to its epoch-millisecond {@code Long}. Any other
     * input, including {@code null}, converts to {@code null}.
     */
    @Override
    public Object convertTo(Object source, Object dest) {
        if (source instanceof Long) {
            return new Timestamp(((Long) source).longValue());
        }
        if (source instanceof Timestamp) {
            return Long.valueOf(((Timestamp) source).getTime());
        }
        return null;
    }

    @Override
    public Object convertFrom(Object source, Object dest) {
        // The conversion is symmetric, so both directions share one implementation.
        return convertTo(source, dest);
    }
}
| 841 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/DozerConverter/CsvStringConverter.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.utils.DozerConverter;
import java.util.Arrays;
import java.util.List;
import org.dozer.DozerConverter;
/**
* CsvStringConverter
*/
/**
 * Dozer converter between a comma-separated string and a list of its segments.
 */
public class CsvStringConverter extends DozerConverter<String, List> {

    public CsvStringConverter() {
        super(String.class, List.class);
    }

    /** Splits a comma-separated string into a list; null/empty input yields null. */
    @Override
    public List convertTo(String source, List destination) {
        if (source != null && !source.isEmpty()) {
            return Arrays.asList(source.split(","));
        }
        return null;
    }

    /** Joins the list elements with commas; null/empty input yields null. */
    @Override
    public String convertFrom(List source, String destination) {
        if (source != null && !source.isEmpty()) {
            return String.join(",", source);
        }
        return null;
    }
}
| 842 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/JPAUtil/RepCatalogJPAUtils.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.utils.JPAUtil;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.JPAUtils;
import org.apache.airavata.registry.core.utils.ReplicaCatalogJDBCConfig;
public class RepCatalogJPAUtils {
    // TODO: we can rename this back to replicacatalog_data once we completely replace the other replicacatalog_data persistence context in airavata-registry-core
    // Persistence-unit anchor in persistence.xml for the replica catalog.
    public static final String PERSISTENCE_UNIT_NAME = "replicacatalog_data_new";
    // JDBC settings sourced from airavata-server.properties.
    private static final JDBCConfig JDBC_CONFIG = new ReplicaCatalogJDBCConfig();
    // Single shared factory, created once when this class is first loaded.
    private static final EntityManagerFactory factory = JPAUtils.getEntityManagerFactory(PERSISTENCE_UNIT_NAME, JDBC_CONFIG);
    /**
     * Creates a new EntityManager for the replica catalog persistence unit.
     * Callers are responsible for closing the returned instance.
     */
    public static EntityManager getEntityManager() {
        return factory.createEntityManager();
    }
}
| 843 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/JPAUtil/AppCatalogJPAUtils.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.utils.JPAUtil;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.JPAUtils;
import org.apache.airavata.registry.core.utils.AppCatalogJDBCConfig;
public class AppCatalogJPAUtils {
    // TODO: we can rename this back to appcatalog_data once we completely replace
    // the other appcatalog_data persistence context in airavata-registry-core
    // Persistence-unit anchor in persistence.xml for the app catalog.
    public static final String PERSISTENCE_UNIT_NAME = "appcatalog_data_new";
    // JDBC settings sourced from airavata-server.properties.
    private static final JDBCConfig JDBC_CONFIG = new AppCatalogJDBCConfig();
    // Single shared factory, created once when this class is first loaded.
    private static final EntityManagerFactory factory = JPAUtils.getEntityManagerFactory(PERSISTENCE_UNIT_NAME,
            JDBC_CONFIG);
    /**
     * Creates a new EntityManager for the app catalog persistence unit.
     * Callers are responsible for closing the returned instance.
     */
    public static EntityManager getEntityManager() {
        return factory.createEntityManager();
    }
}
| 844 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/JPAUtil/ExpCatalogJPAUtils.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.utils.JPAUtil;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.JPAUtils;
import org.apache.airavata.registry.core.utils.ExpCatalogJDBCConfig;
public class ExpCatalogJPAUtils {
    // Persistence-unit anchor in persistence.xml for the experiment catalog.
    public static final String PERSISTENCE_UNIT_NAME = "experiment_data_new";
    // JDBC settings sourced from airavata-server.properties.
    private static final JDBCConfig JDBC_CONFIG = new ExpCatalogJDBCConfig();
    // Single shared factory, created once when this class is first loaded.
    private static final EntityManagerFactory factory = JPAUtils.getEntityManagerFactory(PERSISTENCE_UNIT_NAME, JDBC_CONFIG);
    /**
     * Creates a new EntityManager for the experiment catalog persistence unit.
     * Callers are responsible for closing the returned instance.
     */
    public static EntityManager getEntityManager() {
        return factory.createEntityManager();
    }
}
| 845 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/JPAUtil/WorkflowCatalogJPAUtils.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.utils.JPAUtil;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.JPAUtils;
import org.apache.airavata.registry.core.utils.WorkflowCatalogJDBCConfig;
public class WorkflowCatalogJPAUtils {
    // Persistence-unit anchor in persistence.xml for the workflow catalog.
    private static final String PERSISTENCE_UNIT_NAME = "workflowcatalog_data_new";
    // JDBC settings sourced from airavata-server.properties.
    private static final JDBCConfig JDBC_CONFIG = new WorkflowCatalogJDBCConfig();
    // Single shared factory, created once when this class is first loaded.
    private static final EntityManagerFactory factory = JPAUtils.getEntityManagerFactory(PERSISTENCE_UNIT_NAME, JDBC_CONFIG);
    /**
     * Creates a new EntityManager for the workflow catalog persistence unit.
     * Callers are responsible for closing the returned instance.
     */
    public static EntityManager getEntityManager() {
        return factory.createEntityManager();
    }
}
| 846 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/migration/MigrationSchemaGenerator.java | package org.apache.airavata.registry.core.utils.migration;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import org.apache.airavata.common.utils.DBInitConfig;
import org.apache.airavata.common.utils.DBInitializer;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.registry.core.utils.AppCatalogDBInitConfig;
import org.apache.airavata.registry.core.utils.ExpCatalogDBInitConfig;
import org.apache.airavata.registry.core.utils.ReplicaCatalogDBInitConfig;
import org.apache.airavata.registry.core.utils.JPAUtil.AppCatalogJPAUtils;
import org.apache.airavata.registry.core.utils.JPAUtil.ExpCatalogJPAUtils;
import org.apache.airavata.registry.core.utils.JPAUtil.RepCatalogJPAUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Command-line tool that initializes each registry database and then emits a
 * SQL script (via OpenJPA's MappingTool) describing either the schema delta
 * ("add", the default) or the full schema ("build").
 */
public class MigrationSchemaGenerator {
    private static final Logger logger = LoggerFactory.getLogger(MigrationSchemaGenerator.class);

    /** The three registry databases this tool operates on. */
    private enum Database {
        app_catalog(new AppCatalogDBInitConfig().setDbInitScriptPrefix("appcatalog"),
                AppCatalogJPAUtils.PERSISTENCE_UNIT_NAME),
        experiment_catalog(new ExpCatalogDBInitConfig().setDbInitScriptPrefix("expcatalog"),
                ExpCatalogJPAUtils.PERSISTENCE_UNIT_NAME),
        replica_catalog(new ReplicaCatalogDBInitConfig().setDbInitScriptPrefix("replicacatalog"),
                RepCatalogJPAUtils.PERSISTENCE_UNIT_NAME);
        private final DBInitConfig dbInitConfig;
        private final String persistenceUnitName;
        Database(DBInitConfig dbInitConfig, String persistenceUnitName) {
            this.dbInitConfig = dbInitConfig;
            this.persistenceUnitName = persistenceUnitName;
        }
    }

    /**
     * @param args optional first argument is the MappingTool schema action
     *             ("add" by default, or e.g. "build")
     */
    public static void main(String[] args) throws Exception {
        String schemaAction = args.length > 0 ? args[0] : "add";
        try {
            for (Database database : Database.values()) {
                waitForDatabaseServer(database.dbInitConfig.getJDBCConfig(), 60);
                try {
                    logger.info("initializing database " + database.name());
                    DBInitializer.initializeDB(database.dbInitConfig);
                } catch (Exception e) {
                    logger.error("Failed to initialize database " + database.name(), e);
                } finally {
                    // Generate the script even if initialization failed, so a
                    // partially initialized database still yields a usable delta.
                    String outputFile = "add".equals(schemaAction) ? database.name() + "-migration.sql"
                            : database.name() + "-schema.sql";
                    logger.info("creating database script: " + outputFile);
                    MappingToolRunner.run(database.dbInitConfig.getJDBCConfig(), outputFile,
                            database.persistenceUnitName, schemaAction);
                }
            }
        } catch (Exception e) {
            logger.error("Failed to create the databases", e);
            throw e;
        }
    }

    /**
     * Polls the database server once per second until a connection can be
     * established or {@code timeoutSeconds} elapses.
     *
     * @throws RuntimeException if the server is not reachable within the timeout
     */
    private static void waitForDatabaseServer(JDBCConfig jdbcConfig, int timeoutSeconds) {
        long startTime = System.currentTimeMillis();
        boolean connected = false;
        while (!connected) {
            if ((System.currentTimeMillis() - startTime) / 1000 > timeoutSeconds) {
                throw new RuntimeException(
                        "Failed to connect to database server after " + timeoutSeconds + " seconds!");
            }
            Connection conn = null;
            try {
                Class.forName(jdbcConfig.getDriver());
                conn = DriverManager.getConnection(jdbcConfig.getURL(), jdbcConfig.getUser(), jdbcConfig.getPassword());
                connected = conn.isValid(10);
            } catch (Exception e) {
                logger.debug("Failed to connect to database: " + e.getMessage() + ", waiting 1 second before retrying");
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e1) {
                    // Restore the interrupt flag so callers can still observe it;
                    // keep retrying until the timeout expires.
                    Thread.currentThread().interrupt();
                    logger.warn("Thread sleep interrupted, ignoring");
                }
            } finally {
                if (conn != null) {
                    try {
                        conn.close();
                    } catch (SQLException e) {
                        logger.warn("Failed to close connection, ignoring");
                    }
                }
            }
        }
    }
}
| 847 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/utils/migration/MappingToolRunner.java | package org.apache.airavata.registry.core.utils.migration;
import org.apache.airavata.common.utils.JDBCConfig;
import org.apache.airavata.common.utils.JPAUtils;
import org.apache.openjpa.jdbc.conf.JDBCConfiguration;
import org.apache.openjpa.jdbc.conf.JDBCConfigurationImpl;
import org.apache.openjpa.jdbc.meta.MappingTool;
import org.apache.openjpa.lib.util.Options;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Thin wrapper around OpenJPA's MappingTool that writes a SQL script for the
 * given persistence unit instead of executing DDL against the database.
 */
public class MappingToolRunner {
    private static final Logger logger = LoggerFactory.getLogger(MappingToolRunner.class);

    /** Runs with the default "add" schema action. */
    public static void run(JDBCConfig jdbcConfig, String outputFile, String persistenceUnitName) {
        run(jdbcConfig, outputFile, persistenceUnitName, MappingTool.ACTION_ADD);
    }

    // schemaAction is one of MappingTool's supported actions: http://openjpa.apache.org/builds/2.4.3/apache-openjpa/docs/ref_guide_mapping.html#ref_guide_mapping_mappingtool
    public static void run(JDBCConfig jdbcConfig, String outputFile, String persistenceUnitName, String schemaAction) {
        JDBCConfiguration conf = new JDBCConfigurationImpl();
        conf.fromProperties(JPAUtils.createConnectionProperties(jdbcConfig));
        conf.setConnectionDriverName("org.apache.commons.dbcp.BasicDataSource");

        Options toolOptions = new Options();
        toolOptions.put("sqlFile", outputFile);
        // schemaAction "add" brings the schema up to date by adding missing schema elements
        // schemaAction "build" creates the entire schema as if the database is empty
        toolOptions.put("schemaAction", schemaAction);
        toolOptions.put("foreignKeys", "true");
        toolOptions.put("indexes", "true");
        toolOptions.put("primaryKeys", "true");
        // Specify persistence-unit name using it's anchor in the persistence.xml file
        // http://openjpa.apache.org/builds/2.4.3/apache-openjpa/docs/ref_guide_conf_devtools.html
        toolOptions.put("properties", "persistence.xml#" + persistenceUnitName);

        try {
            MappingTool.run(conf, new String[] {}, toolOptions, null);
        } catch (Exception mappingToolEx) {
            logger.error("Failed to run MappingTool", mappingToolEx);
            throw new RuntimeException(
                    "Failed to run MappingTool to generate migration script", mappingToolEx);
        }
    }
}
| 848 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/AbstractRepository.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories;
import org.apache.airavata.registry.core.utils.Committer;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Base repository mapping between Thrift models (T) and JPA entities (E) with
 * primary-key type Id. Subclasses supply the EntityManager source.
 */
public abstract class AbstractRepository<T, E, Id> {
    private final static Logger logger = LoggerFactory.getLogger(AbstractRepository.class);

    private Class<T> thriftGenericClass;
    private Class<E> dbEntityGenericClass;

    public AbstractRepository(Class<T> thriftGenericClass, Class<E> dbEntityGenericClass) {
        this.thriftGenericClass = thriftGenericClass;
        this.dbEntityGenericClass = dbEntityGenericClass;
    }

    /** Persists a new Thrift model (delegates to {@link #update}, which merges). */
    public T create(T t) {
        return update(t);
    }

    /** Maps the Thrift model to its entity and merges it into the database. */
    public T update(T t) {
        return mergeEntity(mapToEntity(t));
    }

    /** Converts a Thrift model to its JPA entity via Dozer. */
    protected E mapToEntity(T t) {
        Mapper mapper = ObjectMapperSingleton.getInstance();
        return mapper.map(t, dbEntityGenericClass);
    }

    /** Merges an entity in a transaction and maps the persisted copy back. */
    protected T mergeEntity(E entity) {
        Mapper mapper = ObjectMapperSingleton.getInstance();
        E persistedCopy = execute(entityManager -> entityManager.merge(entity));
        return mapper.map(persistedCopy, thriftGenericClass);
    }

    /** Removes the entity with the given id. Always returns true. */
    public boolean delete(Id id) {
        execute(entityManager -> {
            E entity = entityManager.find(dbEntityGenericClass, id);
            entityManager.remove(entity);
            return entity;
        });
        return true;
    }

    /** Finds by id and maps to the Thrift model, or null if not found. */
    public T get(Id id) {
        E entity = execute(entityManager -> entityManager
                .find(dbEntityGenericClass, id));
        if (entity == null)
            return null;
        Mapper mapper = ObjectMapperSingleton.getInstance();
        return mapper.map(entity, thriftGenericClass);
    }

    /** Runs a JPQL query with an offset and maps each row to the Thrift model. */
    public List<T> select(String query, int offset) {
        List resultSet = (List) execute(entityManager -> entityManager.createQuery(query).setFirstResult(offset)
                .getResultList());
        Mapper mapper = ObjectMapperSingleton.getInstance();
        List<T> gatewayList = new ArrayList<>();
        resultSet.stream().forEach(rs -> gatewayList.add(mapper.map(rs, thriftGenericClass)));
        return gatewayList;
    }

    /**
     * Runs a parameterized JPQL query with paging. A negative limit means
     * "use the configured maximum row count".
     */
    public List<T> select(String query, int limit, int offset, Map<String, Object> queryParams) {
        int newLimit = limit < 0 ? DBConstants.SELECT_MAX_ROWS : limit;
        List resultSet = (List) execute(entityManager -> {
            Query jpaQuery = entityManager.createQuery(query);
            for (Map.Entry<String, Object> entry : queryParams.entrySet()) {
                jpaQuery.setParameter(entry.getKey(), entry.getValue());
            }
            return jpaQuery.setFirstResult(offset).setMaxResults(newLimit).getResultList();
        });
        Mapper mapper = ObjectMapperSingleton.getInstance();
        List<T> gatewayList = new ArrayList<>();
        resultSet.stream().forEach(rs -> gatewayList.add(mapper.map(rs, thriftGenericClass)));
        return gatewayList;
    }

    /** Returns true when an entity with the given id exists. */
    public boolean isExists(Id id) {
        return get(id) != null;
    }

    /** Runs a parameterized JPQL query that yields a single numeric scalar. */
    public int scalarInt(String query, Map<String, Object> queryParams) {
        int scalarInt = execute(entityManager -> {
            Query jpaQuery = entityManager.createQuery(query);
            for (Map.Entry<String, Object> entry : queryParams.entrySet()) {
                jpaQuery.setParameter(entry.getKey(), entry.getValue());
            }
            return ((Number) jpaQuery.getSingleResult()).intValue();
        });
        return scalarInt;
    }

    /**
     * Runs the committer inside a transaction. On failure the transaction is
     * rolled back; in all cases the EntityManager is closed.
     */
    public <R> R execute(Committer<EntityManager, R> committer) {
        EntityManager entityManager = getEntityManagerOrFail();
        try {
            entityManager.getTransaction().begin();
            R r = committer.commit(entityManager);
            entityManager.getTransaction().commit();
            return r;
        } catch (Exception e) {
            logger.error("Failed to execute transaction", e);
            throw e;
        } finally {
            rollbackAndClose(entityManager);
        }
    }

    /** Executes a native update statement with positional (1-based) parameters. */
    public void executeWithNativeQuery(String query, String... params) {
        EntityManager entityManager = getEntityManagerOrFail();
        try {
            Query nativeQuery = entityManager.createNativeQuery(query);
            for (int i = 0; i < params.length; i++) {
                nativeQuery.setParameter((i + 1), params[i]);
            }
            entityManager.getTransaction().begin();
            nativeQuery.executeUpdate();
            entityManager.getTransaction().commit();
        } catch (Exception e) {
            logger.error("Failed to execute transaction", e);
            throw e;
        } finally {
            rollbackAndClose(entityManager);
        }
    }

    /** Runs a native select with positional (1-based) parameters. */
    public List selectWithNativeQuery(String query, String... params) {
        EntityManager entityManager = getEntityManagerOrFail();
        try {
            Query nativeQuery = entityManager.createNativeQuery(query);
            for (int i = 0; i < params.length; i++) {
                nativeQuery.setParameter((i + 1), params[i]);
            }
            return nativeQuery.getResultList();
        } catch (Exception e) {
            logger.error("Failed to execute transaction", e);
            throw e;
        } finally {
            rollbackAndClose(entityManager);
        }
    }

    /** Obtains an EntityManager, wrapping any failure in a RuntimeException. */
    private EntityManager getEntityManagerOrFail() {
        try {
            return getEntityManager();
        } catch (Exception e) {
            logger.error("Failed to get EntityManager", e);
            throw new RuntimeException("Failed to get EntityManager", e);
        }
    }

    /** Rolls back any still-active transaction and closes the EntityManager. */
    private static void rollbackAndClose(EntityManager entityManager) {
        if (entityManager != null && entityManager.isOpen()) {
            if (entityManager.getTransaction().isActive()) {
                entityManager.getTransaction().rollback();
            }
            entityManager.close();
        }
    }

    abstract protected EntityManager getEntityManager();
}
| 849 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/GatewayRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.model.workspace.Gateway;
import org.apache.airavata.registry.core.entities.expcatalog.GatewayEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class GatewayRepository extends ExpCatAbstractRepository<Gateway, GatewayEntity, String> {
private final static Logger logger = LoggerFactory.getLogger(GatewayRepository.class);
public GatewayRepository() { super(Gateway.class, GatewayEntity.class); }
protected String saveGatewayData(Gateway gateway) throws RegistryException {
GatewayEntity gatewayEntity = saveGateway(gateway);
return gatewayEntity.getGatewayId();
}
/**
 * Maps the gateway model to its entity and merges it into the database.
 * For a gateway that does not exist yet, the request creation time is
 * stamped with the current time; an existing row keeps its original value.
 *
 * @param gateway the gateway to persist
 * @return the merged (managed) entity
 * @throws RegistryException on persistence failure
 */
protected GatewayEntity saveGateway(Gateway gateway) throws RegistryException {
    String gatewayId = gateway.getGatewayId();
    Mapper mapper = ObjectMapperSingleton.getInstance();
    GatewayEntity gatewayEntity = mapper.map(gateway, GatewayEntity.class);
    if (!isGatewayExist(gatewayId)) {
        // Fix: the old message ("Checking if the Gateway already exists") was logged
        // after the check, inside the not-exists branch, and described the wrong step.
        logger.debug("Gateway does not exist yet; setting request creation time");
        gatewayEntity.setRequestCreationTime(new Timestamp(System.currentTimeMillis()));
    }
    return execute(entityManager -> entityManager.merge(gatewayEntity));
}
/**
 * Adds (or updates, via merge) a gateway and returns its id.
 *
 * @param gateway the gateway to add
 * @return the gateway id of the persisted record
 * @throws RegistryException on persistence failure
 */
public String addGateway(Gateway gateway) throws RegistryException{
    return saveGatewayData(gateway);
}
/**
 * Updates an existing gateway.
 *
 * NOTE(review): the {@code gatewayId} parameter is unused — the id is taken from
 * {@code updatedGateway} itself; presumably callers pass matching values. TODO confirm.
 *
 * @param gatewayId      id of the gateway to update (currently ignored)
 * @param updatedGateway the new gateway state to persist
 * @throws RegistryException on persistence failure
 */
public void updateGateway(String gatewayId, Gateway updatedGateway) throws RegistryException{
    saveGatewayData(updatedGateway);
}
/**
 * Fetches a gateway by its id.
 *
 * @param gatewayId the primary key of the gateway
 * @return the gateway, or whatever {@code get} returns when the id is absent
 * @throws RegistryException on lookup failure
 */
public Gateway getGateway(String gatewayId) throws RegistryException{
    return get(gatewayId);
}
/**
 * Fetches every gateway in the catalog.
 *
 * @return the list of all gateways
 * @throws RegistryException on lookup failure
 */
public List<Gateway> getAllGateways() throws RegistryException {
    return select(QueryConstants.GET_ALL_GATEWAYS, 0);
}
/**
 * Resolves the server-configured default gateway by name.
 *
 * @return the default gateway, or {@code null} if no gateway with that name exists
 * @throws ApplicationSettingsException if the default gateway name is not configured
 * @throws RegistryException            on lookup failure
 */
public Gateway getDefaultGateway() throws ApplicationSettingsException, RegistryException {
    String defaultGatewayName = ServerSettings.getDefaultUserGateway();
    return getExistingGateway(defaultGatewayName);
}
/**
 * Looks a gateway up by its name.
 *
 * @param gatewayName the gateway name to match
 * @return the first matching gateway, or {@code null} when none matches
 * @throws RegistryException on lookup failure
 */
public Gateway getExistingGateway(String gatewayName) throws RegistryException {
    Map<String, Object> params = new HashMap<>();
    params.put(DBConstants.Gateway.GATEWAY_NAME, gatewayName);
    List<Gateway> matches = select(QueryConstants.GET_GATEWAY_FROM_GATEWAY_NAME, -1, 0, params);
    if (matches == null || matches.isEmpty()) {
        return null;
    }
    logger.debug("Return the record (there is only one record)");
    return matches.get(0);
}
/**
 * Checks whether a gateway with the given id exists.
 *
 * @param gatewayId the gateway primary key
 * @return {@code true} if a record exists
 * @throws RegistryException on lookup failure
 */
public boolean isGatewayExist(String gatewayId) throws RegistryException{
    return isExists(gatewayId);
}
/**
 * Deletes the gateway with the given id.
 *
 * @param gatewayId the gateway primary key
 * @return the result of the delete operation
 * @throws RegistryException on delete failure
 */
public boolean removeGateway(String gatewayId) throws RegistryException{
    return delete(gatewayId);
}
} | 850 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/NotificationRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.workspace.Notification;
import org.apache.airavata.registry.core.entities.expcatalog.NotificationEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
public class NotificationRepository extends ExpCatAbstractRepository<Notification, NotificationEntity, String> {
private final static Logger logger = LoggerFactory.getLogger(NotificationRepository.class);
// Binds the Notification Thrift model to its JPA NotificationEntity for the generic CRUD operations.
public NotificationRepository() { super(Notification.class, NotificationEntity.class); }
/**
 * Persists the notification and returns its id.
 *
 * @param notification the notification to save
 * @return the notification id of the persisted record
 * @throws RegistryException on persistence failure
 */
protected String saveNotificationData(Notification notification) throws RegistryException {
    NotificationEntity notificationEntity = saveNotification(notification);
    return notificationEntity.getNotificationId();
}
/**
 * Maps the notification model to its entity, normalizes the timestamp columns
 * from the model's epoch-millisecond fields (defaulting the creation time to
 * "now" when the mapper left it unset), and merges the entity.
 *
 * @param notification the notification to persist
 * @return the merged (managed) entity
 * @throws RegistryException on persistence failure
 */
protected NotificationEntity saveNotification(Notification notification) throws RegistryException {
    Mapper mapper = ObjectMapperSingleton.getInstance();
    NotificationEntity entity = mapper.map(notification, NotificationEntity.class);
    if (entity.getCreationTime() == null) {
        logger.debug("Setting the Notification's creation time to current time");
        entity.setCreationTime(new Timestamp(System.currentTimeMillis()));
    } else {
        logger.debug("Setting the Notification's creation time");
        entity.setCreationTime(new Timestamp(notification.getCreationTime()));
    }
    if (entity.getPublishedTime() != null) {
        logger.debug("Setting the Notification's published time");
        entity.setPublishedTime(new Timestamp(notification.getPublishedTime()));
    }
    if (entity.getExpirationTime() != null) {
        logger.debug("Setting the Notification's expiration time");
        entity.setExpirationTime(new Timestamp(notification.getExpirationTime()));
    }
    return execute(entityManager -> entityManager.merge(entity));
}
/**
 * Creates a notification, assigning it a freshly generated random UUID as its id.
 * Any id already present on the model is overwritten.
 *
 * @param notification the notification to create
 * @return the generated notification id
 * @throws RegistryException on persistence failure
 */
public String createNotification(Notification notification) throws RegistryException {
    notification.setNotificationId(getNotificationId());
    return saveNotificationData(notification);
}
/**
 * Updates an existing notification; the id is taken from the model itself.
 *
 * @param notification the notification state to persist
 * @throws RegistryException on persistence failure
 */
public void updateNotification(Notification notification) throws RegistryException {
    saveNotificationData(notification);
}
/**
 * Fetches a notification by its id.
 *
 * @param notificationId the notification primary key
 * @return the notification, or whatever {@code get} returns when the id is absent
 * @throws RegistryException on lookup failure
 */
public Notification getNotification(String notificationId) throws RegistryException{
    return get(notificationId);
}
/**
 * Fetches every notification belonging to the given gateway.
 *
 * @param gatewayId the owning gateway id
 * @return the gateway's notifications
 * @throws RegistryException on lookup failure
 */
public List<Notification> getAllGatewayNotifications(String gatewayId) throws RegistryException {
    Map<String, Object> params = new HashMap<>();
    params.put(DBConstants.Notification.GATEWAY_ID, gatewayId);
    return select(QueryConstants.GET_ALL_GATEWAY_NOTIFICATIONS, -1, 0, params);
}
// Generates a fresh random UUID string to use as a notification id.
private String getNotificationId() {
    return UUID.randomUUID().toString();
}
/**
 * Deletes the notification with the given id.
 *
 * @param notificationId the notification primary key
 * @throws RegistryException on delete failure
 */
public void deleteNotification(String notificationId) throws RegistryException {
    delete(notificationId);
}
} | 851 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/TaskErrorRepository.java | package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.commons.ErrorModel;
import org.apache.airavata.model.task.TaskModel;
import org.apache.airavata.registry.core.entities.expcatalog.TaskErrorEntity;
import org.apache.airavata.registry.core.entities.expcatalog.TaskErrorPK;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
public class TaskErrorRepository extends ExpCatAbstractRepository<ErrorModel, TaskErrorEntity, TaskErrorPK> {
private final static Logger logger = LoggerFactory.getLogger(TaskErrorRepository.class);
public TaskErrorRepository() { super(ErrorModel.class, TaskErrorEntity.class); }
protected String saveTaskError(ErrorModel error, String taskId) throws RegistryException {
Mapper mapper = ObjectMapperSingleton.getInstance();
TaskErrorEntity taskErrorEntity = mapper.map(error, TaskErrorEntity.class);
if (taskErrorEntity.getTaskId() == null) {
logger.debug("Setting the TaskErrorEntity's TaskId");
taskErrorEntity.setTaskId(taskId);
}
execute(entityManager -> entityManager.merge(taskErrorEntity));
return taskErrorEntity.getErrorId();
}
public String addTaskError(ErrorModel taskError, String taskId) throws RegistryException {
if (taskError.getErrorId() == null) {
logger.debug("Setting the TaskError's ErrorId");
taskError.setErrorId(ExpCatalogUtils.getID("ERROR"));
}
return saveTaskError(taskError, taskId);
}
public String updateTaskError(ErrorModel updatedTaskError, String taskId) throws RegistryException {
return saveTaskError(updatedTaskError, taskId);
}
public List<ErrorModel> getTaskError(String taskId) throws RegistryException {
TaskRepository taskRepository = new TaskRepository();
TaskModel taskModel = taskRepository.getTask(taskId);
return taskModel.getTaskErrors();
}
}
| 852 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/JobRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.commons.airavata_commonsConstants;
import org.apache.airavata.model.job.JobModel;
import org.apache.airavata.registry.core.entities.expcatalog.JobEntity;
import org.apache.airavata.registry.core.entities.expcatalog.JobPK;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Repository for job records, keyed by the composite {@link JobPK} (jobId + taskId).
 */
public class JobRepository extends ExpCatAbstractRepository<JobModel, JobEntity, JobPK> {
    private final static Logger logger = LoggerFactory.getLogger(JobRepository.class);

    public JobRepository() { super(JobModel.class, JobEntity.class); }

    /**
     * Persists the job and returns its id.
     *
     * @param jobModel the job to save
     * @param jobPK    the job's composite primary key
     * @return the job id of the persisted record
     * @throws RegistryException on persistence failure
     */
    protected String saveJobModelData(JobModel jobModel, JobPK jobPK) throws RegistryException {
        JobEntity jobEntity = saveJob(jobModel, jobPK);
        return jobEntity.getJobId();
    }

    /**
     * Normalizes the job model (job id, status ids, creation time for new jobs),
     * maps it to its entity, wires up child-row foreign keys, and merges it.
     *
     * @param jobModel the job to persist
     * @param jobPK    the job's composite primary key
     * @return the merged (managed) entity
     * @throws RegistryException on persistence failure
     */
    protected JobEntity saveJob(JobModel jobModel, JobPK jobPK) throws RegistryException {
        if (jobModel.getJobId() == null || jobModel.getJobId().equals(airavata_commonsConstants.DEFAULT_ID)) {
            logger.debug("Setting the Job's JobId");
            jobModel.setJobId(jobPK.getJobId());
        }
        if (jobModel.getJobStatuses() != null) {
            logger.debug("Populating the status ids of JobStatus objects for the Job");
            jobModel.getJobStatuses().forEach(jobStatus -> {
                if (jobStatus.getStatusId() == null) {
                    jobStatus.setStatusId(ExpCatalogUtils.getID("JOB_STATE"));
                }
            });
        }
        if (!isJobExist(jobPK)) {
            logger.debug("Setting creation time to current time if does not exist");
            jobModel.setCreationTime(System.currentTimeMillis());
        }
        Mapper mapper = ObjectMapperSingleton.getInstance();
        JobEntity jobEntity = mapper.map(jobModel, JobEntity.class);
        populateParentIds(jobEntity);
        return execute(entityManager -> entityManager.merge(jobEntity));
    }

    /**
     * Copies the job's composite key onto each child JobStatus row so the
     * foreign-key columns are populated before the merge.
     */
    protected void populateParentIds(JobEntity jobEntity) {
        String jobId = jobEntity.getJobId();
        String taskId = jobEntity.getTaskId();
        if (jobEntity.getJobStatuses() != null) {
            logger.debug("Populating the Primary Key of JobStatus objects for the Job");
            jobEntity.getJobStatuses().forEach(jobStatusEntity -> {
                jobStatusEntity.setJobId(jobId);
                jobStatusEntity.setTaskId(taskId);
            });
        }
    }

    /**
     * Adds a job. The composite key is built from the model's own jobId/taskId.
     *
     * NOTE(review): {@code processId} is unused here — the process linkage
     * presumably comes from the model itself; kept for interface compatibility.
     *
     * @param job       the job to add
     * @param processId the owning process id (currently ignored)
     * @return the job id of the persisted record
     * @throws RegistryException on persistence failure
     */
    public String addJob(JobModel job, String processId) throws RegistryException {
        JobPK jobPK = new JobPK();
        jobPK.setJobId(job.getJobId());
        jobPK.setTaskId(job.getTaskId());
        return saveJobModelData(job, jobPK);
    }

    /**
     * Updates an existing job.
     *
     * @param job   the new job state
     * @param jobPK the job's composite primary key
     * @return the job id of the persisted record
     * @throws RegistryException on persistence failure
     */
    public String updateJob(JobModel job, JobPK jobPK) throws RegistryException {
        return saveJobModelData(job, jobPK);
    }

    /**
     * Fetches a job by its composite primary key.
     */
    public JobModel getJob(JobPK jobPK) throws RegistryException {
        return get(jobPK);
    }

    /**
     * Searches jobs by one of the supported fields (process id, task id, or job id).
     *
     * Fix: previously instantiated a redundant {@code new JobRepository()} to run
     * the query; the inherited {@code select} on {@code this} is equivalent. The
     * three duplicated branches are collapsed into one private helper.
     *
     * @param fieldName one of DBConstants.Job.{PROCESS_ID, TASK_ID, JOB_ID}
     * @param value     the value to match
     * @return the matching jobs
     * @throws RegistryException        on lookup failure
     * @throws IllegalArgumentException for an unsupported field name
     */
    public List<JobModel> getJobList(String fieldName, Object value) throws RegistryException {
        if (fieldName.equals(DBConstants.Job.PROCESS_ID)) {
            logger.debug("Search criteria is ProcessId");
            return selectJobs(QueryConstants.GET_JOB_FOR_PROCESS_ID, DBConstants.Job.PROCESS_ID, value);
        } else if (fieldName.equals(DBConstants.Job.TASK_ID)) {
            logger.debug("Search criteria is TaskId");
            return selectJobs(QueryConstants.GET_JOB_FOR_TASK_ID, DBConstants.Job.TASK_ID, value);
        } else if (fieldName.equals(DBConstants.Job.JOB_ID)) {
            logger.debug("Search criteria is JobId");
            return selectJobs(QueryConstants.GET_JOB_FOR_JOB_ID, DBConstants.Job.JOB_ID, value);
        } else {
            logger.error("Unsupported field name for Job module.");
            throw new IllegalArgumentException("Unsupported field name for Job module.");
        }
    }

    // Runs one of the single-parameter job queries with no limit and no offset.
    private List<JobModel> selectJobs(String query, String paramName, Object value) throws RegistryException {
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(paramName, value);
        return select(query, -1, 0, queryParameters);
    }

    /**
     * Searches jobs by a supported field and returns just their ids.
     *
     * @see #getJobList(String, Object)
     */
    public List<String> getJobIds(String fieldName, Object value) throws RegistryException {
        List<String> jobIds = new ArrayList<>();
        for (JobModel jobModel : getJobList(fieldName, value)) {
            jobIds.add(jobModel.getJobId());
        }
        return jobIds;
    }

    /**
     * Checks whether a job with the given composite key exists.
     */
    public boolean isJobExist(JobPK jobPK) throws RegistryException {
        return isExists(jobPK);
    }

    /**
     * Deletes the job with the given composite key.
     */
    public void removeJob(JobPK jobPK) throws RegistryException {
        delete(jobPK);
    }

    /**
     * Deletes the job via a native query keyed on (jobId, taskId).
     */
    public void removeJob(JobModel jobModel) throws RegistryException {
        executeWithNativeQuery(QueryConstants.DELETE_JOB_NATIVE_QUERY, jobModel.getJobId(), jobModel.getTaskId());
    }
}
| 853 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentErrorRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.commons.ErrorModel;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentErrorEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentErrorPK;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
public class ExperimentErrorRepository extends ExpCatAbstractRepository<ErrorModel, ExperimentErrorEntity, ExperimentErrorPK> {
private final static Logger logger = LoggerFactory.getLogger(ExperimentErrorRepository.class);
public ExperimentErrorRepository() {
super(ErrorModel.class, ExperimentErrorEntity.class);
}
protected String saveExperimentError(ErrorModel error, String experimentId) throws RegistryException {
Mapper mapper = ObjectMapperSingleton.getInstance();
ExperimentErrorEntity experimentErrorEntity = mapper.map(error, ExperimentErrorEntity.class);
if (experimentErrorEntity.getExperimentId() == null) {
logger.debug("Setting the ExperimentErrorEntity's ExperimentId");
experimentErrorEntity.setExperimentId(experimentId);
}
execute(entityManager -> entityManager.merge(experimentErrorEntity));
return experimentErrorEntity.getErrorId();
}
public String addExperimentError(ErrorModel experimentError, String experimentId) throws RegistryException {
if (experimentError.getErrorId() == null) {
logger.debug("Setting the ExperimentError's ErrorId");
experimentError.setErrorId(ExpCatalogUtils.getID("ERROR"));
}
return saveExperimentError(experimentError, experimentId);
}
public String updateExperimentError(ErrorModel updatedExperimentError, String experimentId) throws RegistryException {
return saveExperimentError(updatedExperimentError, experimentId);
}
public List<ErrorModel> getExperimentErrors(String experimentId) throws RegistryException {
ExperimentRepository experimentRepository = new ExperimentRepository();
ExperimentModel experimentModel = experimentRepository.getExperiment(experimentId);
return experimentModel.getErrors();
}
}
| 854 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentInputRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentInputEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentInputPK;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
 * Repository for experiment input records, keyed by the composite {@link ExperimentInputPK}.
 */
public class ExperimentInputRepository extends ExpCatAbstractRepository<InputDataObjectType, ExperimentInputEntity, ExperimentInputPK> {
    private final static Logger logger = LoggerFactory.getLogger(ExperimentInputRepository.class);

    public ExperimentInputRepository() { super(InputDataObjectType.class, ExperimentInputEntity.class); }

    /**
     * Maps each input to its entity, fills in the owning experiment id when the
     * mapper left it unset, and merges the entity.
     *
     * @param experimentInputs the inputs to persist
     * @param experimentId     the owning experiment id
     * @throws RegistryException on persistence failure
     */
    protected void saveExperimentInput(List<InputDataObjectType> experimentInputs, String experimentId) throws RegistryException {
        // Hoisted out of the loop: the mapper singleton is loop-invariant.
        Mapper mapper = ObjectMapperSingleton.getInstance();
        for (InputDataObjectType input : experimentInputs) {
            ExperimentInputEntity experimentInputEntity = mapper.map(input, ExperimentInputEntity.class);
            if (experimentInputEntity.getExperimentId() == null) {
                logger.debug("Setting the ExperimentInputEntity's ExperimentId");
                experimentInputEntity.setExperimentId(experimentId);
            }
            execute(entityManager -> entityManager.merge(experimentInputEntity));
        }
    }

    /**
     * Adds the given inputs to an experiment.
     *
     * @param experimentInputs the inputs to add
     * @param experimentId     the owning experiment id
     * @return the experiment id
     * @throws RegistryException on persistence failure
     */
    public String addExperimentInputs(List<InputDataObjectType> experimentInputs, String experimentId) throws RegistryException {
        saveExperimentInput(experimentInputs, experimentId);
        return experimentId;
    }

    /**
     * Updates the given inputs of an experiment.
     *
     * @param updatedExperimentInputs the new input states
     * @param experimentId            the owning experiment id
     * @throws RegistryException on persistence failure
     */
    public void updateExperimentInputs(List<InputDataObjectType> updatedExperimentInputs, String experimentId) throws RegistryException {
        saveExperimentInput(updatedExperimentInputs, experimentId);
    }

    /**
     * Fetches an experiment's inputs by loading the full experiment model.
     *
     * @param experimentId the experiment id
     * @return the experiment's inputs
     * @throws RegistryException on lookup failure
     */
    public List<InputDataObjectType> getExperimentInputs(String experimentId) throws RegistryException {
        ExperimentRepository experimentRepository = new ExperimentRepository();
        ExperimentModel experimentModel = experimentRepository.getExperiment(experimentId);
        return experimentModel.getExperimentInputs();
    }
}
| 855 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/GatewayUsageReportingCommandRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.workspace.GatewayUsageReportingCommand;
import org.apache.airavata.registry.core.entities.expcatalog.GatewayUsageReportingCommandEntity;
import org.apache.airavata.registry.core.entities.expcatalog.GatewayUsageReportingPK;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Repository for gateway usage reporting commands, keyed by the composite
 * {@link GatewayUsageReportingPK} (gatewayId + computeResourceId).
 */
public class GatewayUsageReportingCommandRepository extends
        ExpCatAbstractRepository<GatewayUsageReportingCommand, GatewayUsageReportingCommandEntity, GatewayUsageReportingPK> {
    // Fix: logger was created for GatewayRepository.class (copy-paste bug),
    // so log lines from this class were attributed to the wrong logger.
    private final static Logger logger = LoggerFactory.getLogger(GatewayUsageReportingCommandRepository.class);

    public GatewayUsageReportingCommandRepository() { super(GatewayUsageReportingCommand.class, GatewayUsageReportingCommandEntity.class); }

    /**
     * Adds (or updates, via merge) a usage reporting command.
     * Also removed an unused local that duplicated {@code command.getGatewayId()}.
     *
     * @param command the command to persist
     * @throws RegistryException on persistence failure
     */
    public void addGatewayUsageReportingCommand(GatewayUsageReportingCommand command) throws RegistryException {
        Mapper mapper = ObjectMapperSingleton.getInstance();
        GatewayUsageReportingCommandEntity reportingEntity = mapper.map(command, GatewayUsageReportingCommandEntity.class);
        execute(entityManager -> entityManager.merge(reportingEntity));
        logger.info("Added gateway usage reporting command for gateway {} to the database", command.getGatewayId());
    }

    /**
     * Fetches the reporting command for a (gateway, compute resource) pair.
     */
    public GatewayUsageReportingCommand getGatewayUsageReportingCommand(String gatewayId, String computeResourceId) {
        return get(toPK(gatewayId, computeResourceId));
    }

    /**
     * Checks whether a reporting command exists for a (gateway, compute resource) pair.
     */
    public boolean isGatewayUsageReportingCommandExists(String gatewayId, String computeResourceId) throws RegistryException {
        return isExists(toPK(gatewayId, computeResourceId));
    }

    /**
     * Deletes the reporting command for a (gateway, compute resource) pair, if present.
     */
    public void removeGatewayUsageReportingCommand(String gatewayId, String computeResourceId) throws RegistryException {
        if (isGatewayUsageReportingCommandExists(gatewayId, computeResourceId)) {
            delete(toPK(gatewayId, computeResourceId));
            logger.info("Deleted gateway usage reporting command for gateway {}", gatewayId);
        }
    }

    // Builds the composite primary key; deduplicates the PK construction
    // previously repeated in three methods.
    private GatewayUsageReportingPK toPK(String gatewayId, String computeResourceId) {
        GatewayUsageReportingPK pk = new GatewayUsageReportingPK();
        pk.setGatewayId(gatewayId);
        pk.setComputeResourceId(computeResourceId);
        return pk;
    }
}
| 856 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentSummaryRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.experiment.ExperimentStatistics;
import org.apache.airavata.model.experiment.ExperimentSummaryModel;
import org.apache.airavata.model.status.ExperimentState;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentSummaryEntity;
import org.apache.airavata.registry.core.entities.expcatalog.JobEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.apache.airavata.registry.cpi.ResultOrderType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.persistence.Query;
public class ExperimentSummaryRepository extends ExpCatAbstractRepository<ExperimentSummaryModel, ExperimentSummaryEntity, String> {
private final static Logger logger = LoggerFactory.getLogger(ExperimentSummaryRepository.class);
private final static int ACCESSIBLE_EXPERIMENT_IDS_BATCH_SIZE = 10000;
// Binds the ExperimentSummary Thrift model to its JPA entity for the generic repository operations.
public ExperimentSummaryRepository() { super(ExperimentSummaryModel.class, ExperimentSummaryEntity.class); }
public List<ExperimentSummaryModel> searchAllAccessibleExperiments(List<String> accessibleExperimentIds, Map<String, String> filters, int limit,
int offset, Object orderByIdentifier, ResultOrderType resultOrderType) throws RegistryException, IllegalArgumentException {
String query = "SELECT ES FROM " + ExperimentSummaryEntity.class.getSimpleName() + " ES WHERE ";
String whereClause = "";
Map<String, Object> queryParameters = new HashMap<>();
if (filters == null || !filters.containsKey(DBConstants.Experiment.GATEWAY_ID)) {
logger.error("GatewayId is required");
throw new RegistryException("GatewayId is required");
}
if (filters.get(DBConstants.Job.JOB_ID) != null) {
logger.debug("Filter Experiments by JobId");
queryParameters.put(DBConstants.Job.JOB_ID, filters.get(DBConstants.Job.JOB_ID));
String query_jobId = "SELECT P.experimentId FROM "
+ JobEntity.class.getSimpleName() + " J "
+ " JOIN J.task T"
+ " JOIN T.process P"
+ " WHERE J.jobId = : " + DBConstants.Job.JOB_ID;
whereClause += "ES.experimentId IN ( " + query_jobId + " ) AND ";
}
if (filters.get(DBConstants.Experiment.USER_NAME) != null) {
logger.debug("Filter Experiments by User");
queryParameters.put(DBConstants.Experiment.USER_NAME, filters.get(DBConstants.Experiment.USER_NAME));
whereClause += "ES.userName LIKE :" + DBConstants.Experiment.USER_NAME + " AND ";
}
if (filters.get(DBConstants.Experiment.GATEWAY_ID) != null) {
logger.debug("Filter Experiments by Gateway ID");
queryParameters.put(DBConstants.Experiment.GATEWAY_ID, filters.get(DBConstants.Experiment.GATEWAY_ID));
whereClause += "ES.gatewayId LIKE :" + DBConstants.Experiment.GATEWAY_ID + " AND ";
}
if (filters.get(DBConstants.Experiment.PROJECT_ID) != null) {
logger.debug("Filter Experiments by Project ID");
queryParameters.put(DBConstants.Experiment.PROJECT_ID, filters.get(DBConstants.Experiment.PROJECT_ID));
whereClause += "ES.projectId LIKE :" + DBConstants.Experiment.PROJECT_ID + " AND ";
}
if (filters.get(DBConstants.Experiment.EXPERIMENT_NAME) != null) {
logger.debug("Filter Experiments by Name");
queryParameters.put(DBConstants.Experiment.EXPERIMENT_NAME, filters.get(DBConstants.Experiment.EXPERIMENT_NAME));
whereClause += "ES.name LIKE :" + DBConstants.Experiment.EXPERIMENT_NAME + " AND ";
}
if (filters.get(DBConstants.Experiment.DESCRIPTION) != null) {
logger.debug("Filter Experiments by Description");
queryParameters.put(DBConstants.Experiment.DESCRIPTION, filters.get(DBConstants.Experiment.DESCRIPTION));
whereClause += "ES.description LIKE :" + DBConstants.Experiment.DESCRIPTION + " AND ";
}
if (filters.get(DBConstants.Experiment.EXECUTION_ID) != null) {
logger.debug("Filter Experiments by Execution ID");
queryParameters.put(DBConstants.Experiment.EXECUTION_ID, filters.get(DBConstants.Experiment.EXECUTION_ID));
whereClause += "ES.executionId LIKE :" + DBConstants.Experiment.EXECUTION_ID + " AND ";
}
if (filters.get(DBConstants.ExperimentSummary.EXPERIMENT_STATUS) != null) {
logger.debug("Filter Experiments by State");
String state = ExperimentState.valueOf(filters.get(DBConstants.ExperimentSummary.EXPERIMENT_STATUS)).toString();
queryParameters.put(DBConstants.ExperimentSummary.EXPERIMENT_STATUS, state);
whereClause += "ES.experimentStatus LIKE :" + DBConstants.ExperimentSummary.EXPERIMENT_STATUS + " AND ";
}
if (filters.get(DBConstants.ExperimentSummary.FROM_DATE) != null
&& filters.get(DBConstants.ExperimentSummary.TO_DATE) != null) {
Timestamp fromDate = new Timestamp(Long.valueOf(filters.get(DBConstants.ExperimentSummary.FROM_DATE)));
Timestamp toDate = new Timestamp(Long.valueOf(filters.get(DBConstants.ExperimentSummary.TO_DATE)));
if (toDate.after(fromDate)) {
logger.debug("Filter Experiments by CreationTime");
queryParameters.put(DBConstants.ExperimentSummary.FROM_DATE, fromDate);
queryParameters.put(DBConstants.ExperimentSummary.TO_DATE, toDate);
whereClause += "ES.creationTime BETWEEN :" + DBConstants.ExperimentSummary.FROM_DATE + " AND :"
+ DBConstants.ExperimentSummary.TO_DATE + " AND ";
}
}
if (filters.get(DBConstants.Experiment.USER_NAME) != null) {
logger.debug("Filter Experiments by Username");
queryParameters.put(DBConstants.Experiment.USER_NAME, filters.get(DBConstants.Experiment.USER_NAME));
whereClause += "ES.userName = :" + DBConstants.Experiment.USER_NAME + " AND ";
}
if (!accessibleExperimentIds.isEmpty()) {
logger.debug("Filter Experiments by Accessible Experiment IDs");
queryParameters.put(DBConstants.Experiment.ACCESSIBLE_EXPERIMENT_IDS, accessibleExperimentIds);
whereClause += " ES.experimentId IN :" + DBConstants.Experiment.ACCESSIBLE_EXPERIMENT_IDS;
}
else {
// If no experiments are accessible then immediately return an empty list
return new ArrayList<ExperimentSummaryModel>();
}
int queryLimit = limit;
int queryOffset = offset;
int accessibleExperimentIdsBatchNum = 0;
// Figure out the initial batch of accessible experiment ids and the
// offset into it by counting the matching experiments in each batch
if (queryOffset > 0) {
String countQuery = "SELECT COUNT(ES) FROM " + ExperimentSummaryEntity.class.getSimpleName() + " ES WHERE ";
countQuery += whereClause;
BatchOffset batchOffset = findInitialAccessibleExperimentsBatchOffset(countQuery, queryOffset, queryParameters, accessibleExperimentIds);
queryOffset = batchOffset.offset;
accessibleExperimentIdsBatchNum = batchOffset.batchNum;
}
query += whereClause;
if (orderByIdentifier != null && resultOrderType != null && orderByIdentifier.equals(DBConstants.Experiment.CREATION_TIME)) {
String order = (resultOrderType == ResultOrderType.ASC) ? "ASC" : "DESC";
query += " ORDER BY ES." + DBConstants.Experiment.CREATION_TIME + " " + order;
}
List<ExperimentSummaryModel> allExperimentSummaryModels = new ArrayList<>();
// Break up the query in batches over accessibleExperimentIds
// NOTE: this assumes that the accessibleExperimentIds are sorted in the
// same order as the expected experiment summary results
double totalBatches = Math.ceil(
Integer.valueOf(accessibleExperimentIds.size()).floatValue() / ACCESSIBLE_EXPERIMENT_IDS_BATCH_SIZE);
for (int batchNum = accessibleExperimentIdsBatchNum; batchNum < totalBatches; batchNum++) {
List<String> accessibleExperimentIdsBatch = accessibleExperimentIds.subList(
batchNum * ACCESSIBLE_EXPERIMENT_IDS_BATCH_SIZE,
Math.min(accessibleExperimentIds.size(), (batchNum + 1) * ACCESSIBLE_EXPERIMENT_IDS_BATCH_SIZE));
queryParameters.put(DBConstants.Experiment.ACCESSIBLE_EXPERIMENT_IDS, accessibleExperimentIdsBatch);
List<ExperimentSummaryModel> experimentSummaryModelList = select(query, queryLimit, queryOffset, queryParameters);
allExperimentSummaryModels.addAll(experimentSummaryModelList);
if (allExperimentSummaryModels.size() == limit) {
return allExperimentSummaryModels;
} else if (limit > 0 && allExperimentSummaryModels.size() < limit) {
queryLimit -= experimentSummaryModelList.size();
// In the next and subsequent batches, start from offset 0
queryOffset = 0;
}
}
return allExperimentSummaryModels;
}
// Value holder identifying which batch of accessible experiment ids contains a
// requested pagination offset, and the offset relative to that batch's start.
// Produced by findInitialAccessibleExperimentsBatchOffset and consumed by the
// batched experiment-summary query loop.
class BatchOffset {
// Index of the batch within the ACCESSIBLE_EXPERIMENT_IDS_BATCH_SIZE-sized batches
final int batchNum;
// Offset into the results of that batch (0 when the batch starts exactly at the target)
final int offset;
BatchOffset(int batchNum, int offset) {
this.batchNum = batchNum;
this.offset = offset;
}
}
/**
 * Locates the batch of accessible experiment ids that contains the requested
 * pagination offset, by running the given COUNT query against each batch in
 * turn and accumulating the matches.
 *
 * @param query a JPQL COUNT query over ExperimentSummaryEntity with the same
 *              WHERE clause as the paging query
 * @param queryOffset the overall result offset being sought
 * @param queryParameters parameters for the query; the accessible-id parameter
 *                        is overwritten per batch (mutated by this method)
 * @param accessibleExperimentIds all accessible experiment ids, assumed sorted
 *                                in the same order as the expected results
 * @return the batch containing the offset and the offset within that batch;
 *         if the offset lies beyond all batches, a batch number one past the
 *         last batch with offset 0
 */
private BatchOffset findInitialAccessibleExperimentsBatchOffset(String query, int queryOffset,
        Map<String, Object> queryParameters, List<String> accessibleExperimentIds) {
    int accumulator = 0;
    // Integer ceiling division. The original converted through float
    // (Integer.valueOf(...).floatValue()), which loses precision once the id
    // list grows beyond float's 24-bit mantissa.
    int totalBatches = (accessibleExperimentIds.size() + ACCESSIBLE_EXPERIMENT_IDS_BATCH_SIZE - 1)
            / ACCESSIBLE_EXPERIMENT_IDS_BATCH_SIZE;
    for (int batchNum = 0; batchNum < totalBatches; batchNum++) {
        List<String> accessibleExperimentIdsBatch = accessibleExperimentIds.subList(
                batchNum * ACCESSIBLE_EXPERIMENT_IDS_BATCH_SIZE,
                Math.min(accessibleExperimentIds.size(), (batchNum + 1) * ACCESSIBLE_EXPERIMENT_IDS_BATCH_SIZE));
        queryParameters.put(DBConstants.Experiment.ACCESSIBLE_EXPERIMENT_IDS, accessibleExperimentIdsBatch);
        int count = scalarInt(query, queryParameters);
        if (accumulator + count > queryOffset) {
            // The requested offset falls inside this batch
            return new BatchOffset(batchNum, queryOffset - accumulator);
        } else if (accumulator + count == queryOffset) {
            // This batch ends exactly at the offset, so start from the next batch
            return new BatchOffset(batchNum + 1, 0);
        }
        accumulator += count;
    }
    // We didn't find a batch containing the offset: return a batch num past the last one
    return new BatchOffset(totalBatches, 0);
}
/**
 * Computes per-state experiment statistics (counts plus paged summary lists)
 * restricted to the given accessible experiment ids.
 *
 * @param accessibleExperimentIds experiment ids the caller may see
 * @param filters required key: gateway id; optional keys: user name,
 *                execution (application) id, resource host id, and
 *                from/to creation-time bounds in epoch millis
 * @param limit page size for each per-state summary list
 * @param offset page offset for each per-state summary list
 * @throws RegistryException if the gateway id filter is missing or a query fails
 */
public ExperimentStatistics getAccessibleExperimentStatistics(List<String> accessibleExperimentIds, Map<String,String> filters, int limit, int offset) throws RegistryException {
    try {
        if (filters == null || !filters.containsKey(DBConstants.Experiment.GATEWAY_ID)) {
            logger.error("GatewayId is required");
            throw new RegistryException("GatewayId is required");
        }
        // Direct lookups replace the original scan over filters.keySet();
        // every filter except the gateway id is optional (null when absent).
        String gatewayId = filters.get(DBConstants.Experiment.GATEWAY_ID);
        String userName = filters.get(DBConstants.Experiment.USER_NAME);
        String applicationName = filters.get(DBConstants.Experiment.EXECUTION_ID);
        String resourceHostName = filters.get(DBConstants.Experiment.RESOURCE_HOST_ID);
        Timestamp fromDate = filters.containsKey(DBConstants.ExperimentSummary.FROM_DATE)
                ? new Timestamp(Long.parseLong(filters.get(DBConstants.ExperimentSummary.FROM_DATE)))
                : null;
        Timestamp toDate = filters.containsKey(DBConstants.ExperimentSummary.TO_DATE)
                ? new Timestamp(Long.parseLong(filters.get(DBConstants.ExperimentSummary.TO_DATE)))
                : null;

        ExperimentStatistics experimentStatistics = new ExperimentStatistics();

        // All experiments (no state filter)
        experimentStatistics.setAllExperimentCount(getExperimentStatisticsCountForState(
                null, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds));
        experimentStatistics.setAllExperiments(getExperimentStatisticsForState(
                null, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds, limit, offset));

        // Created but not yet launched
        List<ExperimentState> createdStates = Arrays.asList(ExperimentState.CREATED, ExperimentState.VALIDATED);
        experimentStatistics.setCreatedExperimentCount(getExperimentStatisticsCountForState(
                createdStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds));
        experimentStatistics.setCreatedExperiments(getExperimentStatisticsForState(
                createdStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds, limit, offset));

        // Currently in flight
        List<ExperimentState> runningStates = Arrays.asList(ExperimentState.EXECUTING, ExperimentState.SCHEDULED, ExperimentState.LAUNCHED);
        experimentStatistics.setRunningExperimentCount(getExperimentStatisticsCountForState(
                runningStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds));
        experimentStatistics.setRunningExperiments(getExperimentStatisticsForState(
                runningStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds, limit, offset));

        // Terminal: completed successfully
        List<ExperimentState> completedStates = Arrays.asList(ExperimentState.COMPLETED);
        experimentStatistics.setCompletedExperimentCount(getExperimentStatisticsCountForState(
                completedStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds));
        experimentStatistics.setCompletedExperiments(getExperimentStatisticsForState(
                completedStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds, limit, offset));

        // Terminal: failed
        List<ExperimentState> failedStates = Arrays.asList(ExperimentState.FAILED);
        experimentStatistics.setFailedExperimentCount(getExperimentStatisticsCountForState(
                failedStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds));
        experimentStatistics.setFailedExperiments(getExperimentStatisticsForState(
                failedStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds, limit, offset));

        // Cancelled (or in the process of cancelling)
        List<ExperimentState> cancelledStates = Arrays.asList(ExperimentState.CANCELED, ExperimentState.CANCELING);
        experimentStatistics.setCancelledExperimentCount(getExperimentStatisticsCountForState(
                cancelledStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds));
        experimentStatistics.setCancelledExperiments(getExperimentStatisticsForState(
                cancelledStates, gatewayId, fromDate, toDate, userName, applicationName, resourceHostName,
                accessibleExperimentIds, limit, offset));

        return experimentStatistics;
    }
    catch (RegistryException e) {
        logger.error("Error while retrieving experiment statistics from registry", e);
        // Rethrow the original exception; the original wrapped it in a fresh
        // RegistryException, which kept only the cause and dropped the message.
        throw e;
    }
}
/**
 * Counts the experiment summaries matching the given state list and filters.
 * A null query from the filter builder (empty accessible-id list) yields 0.
 */
protected int getExperimentStatisticsCountForState(List<ExperimentState> experimentStates, String gatewayId, Timestamp fromDate, Timestamp toDate,
    String userName, String applicationName, String resourceHostName, List<String> experimentIds) throws RegistryException, IllegalArgumentException {
    Map<String, Object> params = new HashMap<>();
    String baseQuery = "SELECT count(ES.experimentId) FROM " + ExperimentSummaryEntity.class.getSimpleName() + " ES WHERE ";
    String countQuery = filterExperimentStatisticsQuery(baseQuery, params, experimentStates, gatewayId,
            fromDate, toDate, userName, applicationName, resourceHostName, experimentIds);
    if (countQuery == null) {
        // No accessible experiments, hence nothing to count
        return 0;
    }
    long total = (long) execute(entityManager -> {
        Query jpaQuery = entityManager.createQuery(countQuery);
        params.forEach(jpaQuery::setParameter);
        return jpaQuery.getSingleResult();
    });
    return (int) total;
}
/**
 * Fetches a page of experiment summaries matching the given state list and
 * filters, ordered newest first. A null query from the filter builder (empty
 * accessible-id list) yields an empty list.
 */
protected List<ExperimentSummaryModel> getExperimentStatisticsForState(List<ExperimentState> experimentStates, String gatewayId, Timestamp fromDate, Timestamp toDate,
    String userName, String applicationName, String resourceHostName, List<String> experimentIds, int limit, int offset) throws RegistryException, IllegalArgumentException {
    Map<String, Object> params = new HashMap<>();
    String baseQuery = "SELECT ES FROM " + ExperimentSummaryEntity.class.getSimpleName() + " ES WHERE ";
    String filteredQuery = filterExperimentStatisticsQuery(baseQuery, params, experimentStates, gatewayId,
            fromDate, toDate, userName, applicationName, resourceHostName, experimentIds);
    if (filteredQuery == null) {
        return new ArrayList<ExperimentSummaryModel>();
    }
    // experimentId is the ordering tiebreaker for identical creation times
    String orderedQuery = filteredQuery + "ORDER BY ES.creationTime DESC, ES.experimentId";
    return select(orderedQuery, limit, offset, params);
}
/**
 * Appends WHERE predicates for the non-null filters to the given base query
 * and fills queryParameters with the corresponding bind values.
 *
 * Predicates are collected in a list and joined with " AND " instead of being
 * appended with a trailing " AND " that is later chopped off via substring();
 * the original chopping corrupted the query whenever resourceHostName was null
 * and no other predicate had been added.
 *
 * @return the completed query (with a trailing space so callers can append
 *         "ORDER BY ..." directly), or null when experimentIds is empty,
 *         signalling the caller to short-circuit with an empty result.
 *         NOTE(review): callers always supply at least a gatewayId, so the
 *         predicate list is assumed non-empty — confirm if new callers appear.
 */
protected String filterExperimentStatisticsQuery(String query, Map<String, Object> queryParameters,
    List<ExperimentState> experimentStates, String gatewayId, Timestamp fromDate,
    Timestamp toDate, String userName, String applicationName, String resourceHostName, List<String> experimentIds) {
    List<String> predicates = new ArrayList<>();
    if (experimentStates != null) {
        logger.debug("Filter Experiments by Experiment States");
        List<String> statesAsStrings = experimentStates.stream().map(s -> s.toString()).collect(Collectors.toList());
        queryParameters.put(DBConstants.ExperimentSummary.EXPERIMENT_STATUS, statesAsStrings);
        predicates.add("ES.experimentStatus IN :" + DBConstants.ExperimentSummary.EXPERIMENT_STATUS);
    }
    if (gatewayId != null) {
        logger.debug("Filter Experiments by GatewayId");
        queryParameters.put(DBConstants.Experiment.GATEWAY_ID, gatewayId);
        predicates.add("ES.gatewayId = :" + DBConstants.Experiment.GATEWAY_ID);
    }
    // The date range is only applied when both bounds are present and ordered
    if (fromDate != null && toDate != null && toDate.after(fromDate)) {
        logger.debug("Filter Experiments by CreationTime");
        queryParameters.put(DBConstants.ExperimentSummary.FROM_DATE, fromDate);
        queryParameters.put(DBConstants.ExperimentSummary.TO_DATE, toDate);
        predicates.add("ES.creationTime BETWEEN :" + DBConstants.ExperimentSummary.FROM_DATE + " AND :" + DBConstants.ExperimentSummary.TO_DATE);
    }
    if (userName != null) {
        logger.debug("Filter Experiments by UserName");
        queryParameters.put(DBConstants.Experiment.USER_NAME, userName);
        predicates.add("ES.userName = :" + DBConstants.Experiment.USER_NAME);
    }
    if (applicationName != null) {
        logger.debug("Filter Experiments by ApplicationName");
        queryParameters.put(DBConstants.Experiment.EXECUTION_ID, applicationName);
        predicates.add("ES.executionId = :" + DBConstants.Experiment.EXECUTION_ID);
    }
    if (experimentIds != null) {
        if (experimentIds.isEmpty()) {
            // No accessible experiments: tell the caller to return an empty result
            return null;
        }
        logger.debug("Filter Experiments by experimentIds");
        queryParameters.put(DBConstants.Experiment.EXPERIMENT_ID, experimentIds);
        predicates.add("ES.experimentId IN :" + DBConstants.Experiment.EXPERIMENT_ID);
    }
    if (resourceHostName != null) {
        logger.debug("Filter Experiments by ResourceHostName");
        queryParameters.put(DBConstants.Experiment.RESOURCE_HOST_ID, resourceHostName);
        predicates.add("ES.resourceHostId = :" + DBConstants.Experiment.RESOURCE_HOST_ID);
    }
    // Trailing space preserved for callers that append "ORDER BY ..." directly
    return query + String.join(" AND ", predicates) + " ";
}
}
| 857 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ProcessStatusRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.model.status.ProcessState;
import org.apache.airavata.model.status.ProcessStatus;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessStatusEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessStatusPK;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Repository for process status records. Maps ProcessStatus thrift models to
 * ProcessStatusEntity JPA entities keyed by (processId, statusId).
 */
public class ProcessStatusRepository extends ExpCatAbstractRepository<ProcessStatus, ProcessStatusEntity, ProcessStatusPK> {
    private final static Logger logger = LoggerFactory.getLogger(ProcessStatusRepository.class);

    public ProcessStatusRepository() {
        super(ProcessStatus.class, ProcessStatusEntity.class);
    }

    /**
     * Persists a status for the given process. When the incoming status has no
     * id and the process's current status has the same state, the existing
     * record is updated in place; otherwise a new status record is created.
     *
     * @return the status id the record was stored under
     */
    protected String saveProcessStatus(ProcessStatus processStatus, String processId) throws RegistryException {
        if (processStatus.getStatusId() == null) {
            ProcessStatus currentProcessStatus = getProcessStatus(processId);
            // BUG FIX: the original compared currentProcessStatus.getState() with
            // itself (always false), so a status update with an unchanged state
            // always minted a fresh id instead of reusing the current record.
            if (currentProcessStatus == null || processStatus.getState() != currentProcessStatus.getState()) {
                processStatus.setStatusId(ExpCatalogUtils.getID("PROCESS_STATE"));
            } else {
                // Update the existing current status if processStatus has no status id and the same state
                processStatus.setStatusId(currentProcessStatus.getStatusId());
            }
        }
        Mapper mapper = ObjectMapperSingleton.getInstance();
        ProcessStatusEntity processStatusEntity = mapper.map(processStatus, ProcessStatusEntity.class);
        if (processStatusEntity.getProcessId() == null) {
            logger.debug("Setting the ProcessStatusEntity's ProcessId");
            processStatusEntity.setProcessId(processId);
        }
        execute(entityManager -> entityManager.merge(processStatusEntity));
        return processStatusEntity.getStatusId();
    }

    /** Adds a new status record, minting a status id when none was supplied. */
    public String addProcessStatus(ProcessStatus processStatus, String processId) throws RegistryException {
        if (processStatus.getStatusId() == null) {
            logger.debug("Setting the ProcessStatus's StatusId");
            processStatus.setStatusId(ExpCatalogUtils.getID("PROCESS_STATE"));
        }
        return saveProcessStatus(processStatus, processId);
    }

    public String updateProcessStatus(ProcessStatus updatedProcessStatus, String processId) throws RegistryException {
        return saveProcessStatus(updatedProcessStatus, processId);
    }

    /**
     * Returns the latest status of the process: the one with the newest
     * timeOfStateChange, preferring a terminal state (COMPLETED/FAILED/CANCELED)
     * when timestamps tie. Returns null when the process has no statuses.
     */
    public ProcessStatus getProcessStatus(String processId) throws RegistryException {
        ProcessRepository processRepository = new ProcessRepository();
        ProcessModel processModel = processRepository.getProcess(processId);
        List<ProcessStatus> processStatusList = processModel.getProcessStatuses();
        if (processStatusList.size() == 0) {
            logger.debug("ProcessStatus list is empty");
            return null;
        }
        ProcessStatus latestProcessStatus = processStatusList.get(0);
        for (int i = 1; i < processStatusList.size(); i++) {
            ProcessStatus candidate = processStatusList.get(i);
            long candidateTime = candidate.getTimeOfStateChange();
            long latestTime = latestProcessStatus.getTimeOfStateChange();
            // BUG FIX: the original tie-break called Timestamp.equals(Long) and
            // ProcessState.equals(String), both of which are always false, so the
            // terminal-state preference never applied. Compare the raw epoch
            // millis and the enum constants instead.
            boolean isTerminal = candidate.getState() == ProcessState.COMPLETED
                    || candidate.getState() == ProcessState.FAILED
                    || candidate.getState() == ProcessState.CANCELED;
            if (candidateTime > latestTime || (candidateTime == latestTime && isTerminal)) {
                latestProcessStatus = candidate;
            }
        }
        return latestProcessStatus;
    }

    /** Returns the full status history of the process. */
    public List<ProcessStatus> getProcessStatusList(String processId) throws RegistryException {
        ProcessRepository processRepository = new ProcessRepository();
        ProcessModel processModel = processRepository.getProcess(processId);
        return processModel.getProcessStatuses();
    }

    /** Returns a page of statuses across all processes in the given state. */
    public List<ProcessStatus> getProcessStatusList(ProcessState processState, int offset, int limit) throws RegistryException {
        Map<String, Object> queryMap = new HashMap<>();
        queryMap.put(DBConstants.ProcessStatus.STATE, processState);
        // Query via this repository directly; no need to allocate a second one
        return select(QueryConstants.FIND_PROCESS_WITH_STATUS, limit, offset, queryMap);
    }
}
| 858 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ProcessErrorRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.status.ProcessStatus;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessErrorEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessErrorPK;
import org.apache.airavata.model.commons.ErrorModel;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
public class ProcessErrorRepository extends ExpCatAbstractRepository<ErrorModel, ProcessErrorEntity, ProcessErrorPK> {
private final static Logger logger = LoggerFactory.getLogger(ProcessErrorRepository.class);
public ProcessErrorRepository() { super(ErrorModel.class, ProcessErrorEntity.class); }
protected String saveProcessError(ErrorModel error, String processId) throws RegistryException {
Mapper mapper = ObjectMapperSingleton.getInstance();
ProcessErrorEntity processErrorEntity = mapper.map(error, ProcessErrorEntity.class);
if (processErrorEntity.getProcessId() == null) {
logger.debug("Setting the ProcessErrorEntity's ProcessId");
processErrorEntity.setProcessId(processId);
}
execute(entityManager -> entityManager.merge(processErrorEntity));
return processErrorEntity.getErrorId();
}
public String addProcessError(ErrorModel processError, String processId) throws RegistryException {
if (processError.getErrorId() == null) {
logger.debug("Setting the ProcessError's ErrorId");
processError.setErrorId(ExpCatalogUtils.getID("ERROR"));
}
return saveProcessError(processError, processId);
}
public String updateProcessError(ErrorModel updatedProcessError, String processId) throws RegistryException {
return saveProcessError(updatedProcessError, processId);
}
public List<ErrorModel> getProcessError(String processId) throws RegistryException {
ProcessRepository processRepository = new ProcessRepository();
ProcessModel processModel = processRepository.getProcess(processId);
return processModel.getProcessErrors();
}
}
| 859 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ProcessInputRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessInputEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessInputPK;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
 * Repository for process input records. Maps InputDataObjectType thrift
 * objects to ProcessInputEntity JPA entities keyed by (processId, name).
 */
public class ProcessInputRepository extends ExpCatAbstractRepository<InputDataObjectType, ProcessInputEntity, ProcessInputPK> {
    private final static Logger logger = LoggerFactory.getLogger(ProcessInputRepository.class);

    public ProcessInputRepository() { super(InputDataObjectType.class, ProcessInputEntity.class); }

    /**
     * Maps each input onto an entity, links it to its owning process when the
     * mapping left the process id unset, and merges it.
     */
    protected void saveProcessInput(List<InputDataObjectType> processInputs, String processId) throws RegistryException {
        // The mapper singleton is loop-invariant; fetch it once instead of per input
        Mapper mapper = ObjectMapperSingleton.getInstance();
        for (InputDataObjectType input : processInputs) {
            ProcessInputEntity processInputEntity = mapper.map(input, ProcessInputEntity.class);
            if (processInputEntity.getProcessId() == null) {
                logger.debug("Setting the ProcessInputEntity's ProcessId");
                processInputEntity.setProcessId(processId);
            }
            execute(entityManager -> entityManager.merge(processInputEntity));
        }
    }

    /** Persists the inputs and returns the owning process id. */
    public String addProcessInputs(List<InputDataObjectType> processInputs, String processId) throws RegistryException {
        saveProcessInput(processInputs, processId);
        return processId;
    }

    public void updateProcessInputs(List<InputDataObjectType> updatedProcessInputs, String processId) throws RegistryException {
        saveProcessInput(updatedProcessInputs, processId);
    }

    /** Returns all inputs recorded against the given process. */
    public List<InputDataObjectType> getProcessInputs(String processId) throws RegistryException {
        ProcessRepository processRepository = new ProcessRepository();
        ProcessModel processModel = processRepository.getProcess(processId);
        return processModel.getProcessInputs();
    }
}
| 860 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ProcessOutputRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.application.io.OutputDataObjectType;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessOutputEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessOutputPK;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
 * Repository for process output records. Maps OutputDataObjectType thrift
 * objects to ProcessOutputEntity JPA entities keyed by (processId, name).
 */
public class ProcessOutputRepository extends ExpCatAbstractRepository<OutputDataObjectType, ProcessOutputEntity, ProcessOutputPK> {
    private final static Logger logger = LoggerFactory.getLogger(ProcessOutputRepository.class);

    public ProcessOutputRepository() { super(OutputDataObjectType.class, ProcessOutputEntity.class); }

    /**
     * Maps each output onto an entity, links it to its owning process when the
     * mapping left the process id unset, and merges it.
     */
    protected void saveProcessOutput(List<OutputDataObjectType> processOutputs, String processId) throws RegistryException {
        // The mapper singleton is loop-invariant; fetch it once instead of per output
        Mapper mapper = ObjectMapperSingleton.getInstance();
        for (OutputDataObjectType output : processOutputs) {
            ProcessOutputEntity processOutputEntity = mapper.map(output, ProcessOutputEntity.class);
            if (processOutputEntity.getProcessId() == null) {
                // Typo fix: original message said "ProcesstId"
                logger.debug("Setting the ProcessOutputEntity's ProcessId");
                processOutputEntity.setProcessId(processId);
            }
            execute(entityManager -> entityManager.merge(processOutputEntity));
        }
    }

    /** Persists the outputs and returns the owning process id. */
    public String addProcessOutputs(List<OutputDataObjectType> processOutputs, String processId) throws RegistryException {
        saveProcessOutput(processOutputs, processId);
        return processId;
    }

    public void updateProcessOutputs(List<OutputDataObjectType> updatedProcessOutputs, String processId) throws RegistryException {
        saveProcessOutput(updatedProcessOutputs, processId);
    }

    /** Returns all outputs recorded against the given process. */
    public List<OutputDataObjectType> getProcessOutputs(String processId) throws RegistryException {
        ProcessRepository processRepository = new ProcessRepository();
        ProcessModel processModel = processRepository.getProcess(processId);
        return processModel.getProcessOutputs();
    }
}
| 861 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/TaskRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.commons.airavata_commonsConstants;
import org.apache.airavata.model.task.TaskModel;
import org.apache.airavata.registry.core.entities.expcatalog.TaskEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class TaskRepository extends ExpCatAbstractRepository<TaskModel, TaskEntity, String> {
private final static Logger logger = LoggerFactory.getLogger(TaskRepository.class);
private final JobRepository jobRepository = new JobRepository();
// Repository for TaskModel <-> TaskEntity persistence, keyed by task id.
public TaskRepository() { super(TaskModel.class, TaskEntity.class); }
/** Persists the task and reports the id it was stored under. */
protected String saveTaskModelData(TaskModel taskModel) throws RegistryException {
    return saveTask(taskModel).getTaskId();
}
/**
 * Maps the task model onto a TaskEntity and merges it. Missing ids (task and
 * per-status) are minted first; creation time is set only for brand-new tasks,
 * and last-update time is refreshed on every save.
 */
protected TaskEntity saveTask(TaskModel taskModel) throws RegistryException {
    // Mint a fresh task id when none (or the placeholder default) was supplied
    if (taskModel.getTaskId() == null || taskModel.getTaskId().equals(airavata_commonsConstants.DEFAULT_ID)) {
        logger.debug("Setting the Task's TaskId");
        taskModel.setTaskId(ExpCatalogUtils.getID("TASK"));
    }
    String taskId = taskModel.getTaskId();
    // Every attached status needs an id before the entity mapping
    if (taskModel.getTaskStatuses() != null) {
        logger.debug("Populating the status id of TaskStatus objects for the Task");
        taskModel.getTaskStatuses().forEach(status -> {
            if (status.getStatusId() == null) {
                status.setStatusId(ExpCatalogUtils.getID("TASK_STATE"));
            }
        });
    }
    if (!isTaskExist(taskId)) {
        logger.debug("Setting creation time if Task doesn't already exist");
        taskModel.setCreationTime(System.currentTimeMillis());
    }
    taskModel.setLastUpdateTime(System.currentTimeMillis());
    TaskEntity taskEntity = ObjectMapperSingleton.getInstance().map(taskModel, TaskEntity.class);
    populateParentIds(taskEntity);
    return execute(entityManager -> entityManager.merge(taskEntity));
}
protected void populateParentIds(TaskEntity taskEntity) {
String taskId = taskEntity.getTaskId();
if (taskEntity.getTaskStatuses() != null) {
logger.debug("Populating the Primary Key of TaskStatus objects for the Task");
taskEntity.getTaskStatuses().forEach(taskStatusEntity -> taskStatusEntity.setTaskId(taskId));
}
if (taskEntity.getTaskErrors() != null) {
logger.debug("Populating the Primary Key of TaskError objects for the Task");
taskEntity.getTaskErrors().forEach(taskErrorEntity -> taskErrorEntity.setTaskId(taskId));
}
if (taskEntity.getJobs() != null) {
logger.debug("Populating the Job objects' Task ID for the Task");
taskEntity.getJobs().forEach(jobEntity -> {
jobEntity.setTaskId(taskId);
jobRepository.populateParentIds(jobEntity);
});
}
}
public String addTask(TaskModel task, String processId) throws RegistryException {
task.setParentProcessId(processId);
String taskId = saveTaskModelData(task);
return taskId;
}
public String updateTask(TaskModel task, String taskId) throws RegistryException {
return saveTaskModelData(task);
}
public TaskModel getTask(String taskId) throws RegistryException {
TaskRepository taskRepository = new TaskRepository();
return taskRepository.get(taskId);
}
public List<TaskModel> getTaskList(String fieldName, Object value) throws RegistryException {
TaskRepository taskRepository = new TaskRepository();
List<TaskModel> taskModelList;
if (fieldName.equals(DBConstants.Task.PARENT_PROCESS_ID)) {
logger.debug("Search criteria is ParentProcessId");
Map<String, Object> queryParameters = new HashMap<>();
queryParameters.put(DBConstants.Task.PARENT_PROCESS_ID, value);
taskModelList = taskRepository.select(QueryConstants.GET_TASK_FOR_PARENT_PROCESS_ID, -1, 0, queryParameters);
}
else {
logger.error("Unsupported field name for Task module.");
throw new IllegalArgumentException("Unsupported field name for Task module.");
}
return taskModelList;
}
public List<String> getTaskIds(String fieldName, Object value) throws RegistryException {
List<String> taskIds = new ArrayList<>();
List<TaskModel> taskModelList = getTaskList(fieldName, value);
for (TaskModel taskModel : taskModelList) {
taskIds.add(taskModel.getTaskId());
}
return taskIds;
}
public boolean isTaskExist(String taskId) throws RegistryException {
return isExists(taskId);
}
public void removeTask(String taskId) throws RegistryException {
delete(taskId);
}
public void deleteTasks(String processId) throws RegistryException {
TaskRepository taskRepository = new TaskRepository();
Map<String, Object> queryParameters = new HashMap<>();
queryParameters.put(DBConstants.Task.PARENT_PROCESS_ID, processId);
List<TaskModel> taskModelList =
taskRepository.select(QueryConstants.GET_TASK_FOR_PARENT_PROCESS_ID, -1, 0, queryParameters);
for(TaskModel taskModel: taskModelList){
delete(taskModel.getTaskId());
}
}
}
| 862 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ExpCatAbstractRepository.java | package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.registry.core.repositories.AbstractRepository;
import org.apache.airavata.registry.core.utils.JPAUtil.ExpCatalogJPAUtils;
import javax.persistence.EntityManager;
/**
 * Base repository for experiment-catalog entities. Binds the generic
 * {@link AbstractRepository} CRUD machinery to the experiment catalog's
 * JPA persistence unit via {@link ExpCatalogJPAUtils}.
 *
 * @param <T>  Thrift data-model class exposed to callers
 * @param <E>  JPA entity class persisted to the database
 * @param <Id> primary-key type of the entity
 */
public class ExpCatAbstractRepository<T, E, Id> extends AbstractRepository<T, E, Id> {
public ExpCatAbstractRepository(Class<T> thriftGenericClass, Class<E> dbEntityGenericClass) {
super(thriftGenericClass, dbEntityGenericClass);
}
// Routes all persistence operations to the experiment-catalog EntityManager.
@Override
protected EntityManager getEntityManager() {
return ExpCatalogJPAUtils.getEntityManager();
}
}
| 863 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentOutputRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.application.io.OutputDataObjectType;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentOutputEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentOutputPK;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
public class ExperimentOutputRepository extends ExpCatAbstractRepository<OutputDataObjectType, ExperimentOutputEntity, ExperimentOutputPK> {
    private final static Logger logger = LoggerFactory.getLogger(ExperimentOutputRepository.class);

    public ExperimentOutputRepository() {
        super(OutputDataObjectType.class, ExperimentOutputEntity.class);
    }

    /**
     * Maps each output to an entity, attaches it to the experiment when the mapper
     * did not carry an experiment id over, and merges it.
     * The Mapper singleton is fetched once instead of once per loop iteration.
     */
    protected void saveExperimentOutput(List<OutputDataObjectType> experimentOutputs, String experimentId) throws RegistryException {
        Mapper mapper = ObjectMapperSingleton.getInstance();
        for (OutputDataObjectType output : experimentOutputs) {
            ExperimentOutputEntity experimentOutputEntity = mapper.map(output, ExperimentOutputEntity.class);
            if (experimentOutputEntity.getExperimentId() == null) {
                logger.debug("Setting the ExperimentOutputEntity's ExperimentId");
                experimentOutputEntity.setExperimentId(experimentId);
            }
            execute(entityManager -> entityManager.merge(experimentOutputEntity));
        }
    }

    /** Persists the given outputs for the experiment and returns the experiment id. */
    public String addExperimentOutputs(List<OutputDataObjectType> experimentOutputs, String experimentId) throws RegistryException {
        saveExperimentOutput(experimentOutputs, experimentId);
        return experimentId;
    }

    /** Merges the updated outputs; save and update share the same merge semantics. */
    public void updateExperimentOutputs(List<OutputDataObjectType> updatedExperimentOutputs, String experimentId) throws RegistryException {
        saveExperimentOutput(updatedExperimentOutputs, experimentId);
    }

    /** Loads the experiment and returns its outputs. */
    public List<OutputDataObjectType> getExperimentOutputs(String experimentId) throws RegistryException {
        ExperimentRepository experimentRepository = new ExperimentRepository();
        ExperimentModel experimentModel = experimentRepository.getExperiment(experimentId);
        return experimentModel.getExperimentOutputs();
    }
}
| 864 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentStatusRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.status.ExperimentState;
import org.apache.airavata.model.status.ExperimentStatus;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentStatusEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentStatusPK;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.List;
public class ExperimentStatusRepository extends ExpCatAbstractRepository<ExperimentStatus, ExperimentStatusEntity, ExperimentStatusPK> {
    private final static Logger logger = LoggerFactory.getLogger(ExperimentStatusRepository.class);

    public ExperimentStatusRepository() { super(ExperimentStatus.class, ExperimentStatusEntity.class); }

    /**
     * Merges the given status for the experiment and returns the status id.
     * When the status carries no id: a fresh id is generated if the state differs
     * from the current latest state, otherwise the current status record is
     * updated in place by reusing its id.
     */
    protected String saveExperimentStatus(ExperimentStatus experimentStatus, String experimentId) throws RegistryException {
        if (experimentStatus.getStatusId() == null) {
            ExperimentStatus currentExperimentStatus = getExperimentStatus(experimentId);
            if (currentExperimentStatus == null || currentExperimentStatus.getState() != experimentStatus.getState()) {
                experimentStatus.setStatusId(ExpCatalogUtils.getID("EXPERIMENT_STATE"));
            } else {
                // Update the existing current status if experimentStatus has no status id and the same state
                experimentStatus.setStatusId(currentExperimentStatus.getStatusId());
            }
        }
        Mapper mapper = ObjectMapperSingleton.getInstance();
        ExperimentStatusEntity experimentStatusEntity = mapper.map(experimentStatus, ExperimentStatusEntity.class);
        if (experimentStatusEntity.getExperimentId() == null) {
            logger.debug("Setting the ExperimentStatusEntity's ExperimentId");
            experimentStatusEntity.setExperimentId(experimentId);
        }
        execute(entityManager -> entityManager.merge(experimentStatusEntity));
        return experimentStatusEntity.getStatusId();
    }

    /** Adds a new status record, generating an id and stamping the state-change time. */
    public String addExperimentStatus(ExperimentStatus experimentStatus, String experimentId) throws RegistryException {
        if (experimentStatus.getStatusId() == null) {
            logger.debug("Setting the ExperimentStatus's StatusId");
            experimentStatus.setStatusId(ExpCatalogUtils.getID("EXPERIMENT_STATE"));
        }
        experimentStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
        return saveExperimentStatus(experimentStatus, experimentId);
    }

    public String updateExperimentStatus(ExperimentStatus updatedExperimentStatus, String experimentId) throws RegistryException {
        return saveExperimentStatus(updatedExperimentStatus, experimentId);
    }

    /**
     * Returns the latest status of the experiment, or null if it has none.
     * Latest = strictly newest time-of-state-change; on a tie, a terminal state
     * (COMPLETED/FAILED/CANCELED) wins.
     *
     * BUG FIX: the original tie-break compared a Timestamp to a boxed Long via
     * equals() (always false) and an ExperimentState enum to a String via equals()
     * (also always false), so the terminal-state tie-break could never fire. The
     * comparison now uses the raw long timestamps and enum identity.
     */
    public ExperimentStatus getExperimentStatus(String experimentId) throws RegistryException {
        ExperimentRepository experimentRepository = new ExperimentRepository();
        ExperimentModel experimentModel = experimentRepository.getExperiment(experimentId);
        List<ExperimentStatus> experimentStatusList = experimentModel.getExperimentStatus();
        if (experimentStatusList == null || experimentStatusList.isEmpty()) {
            logger.debug("ExperimentStatus list is empty");
            return null;
        }
        ExperimentStatus latestExperimentStatus = experimentStatusList.get(0);
        for (int i = 1; i < experimentStatusList.size(); i++) {
            ExperimentStatus candidate = experimentStatusList.get(i);
            long candidateTime = candidate.getTimeOfStateChange();
            long latestTime = latestExperimentStatus.getTimeOfStateChange();
            boolean candidateIsTerminal = candidate.getState() == ExperimentState.COMPLETED
                    || candidate.getState() == ExperimentState.FAILED
                    || candidate.getState() == ExperimentState.CANCELED;
            if (candidateTime > latestTime || (candidateTime == latestTime && candidateIsTerminal)) {
                latestExperimentStatus = candidate;
            }
        }
        return latestExperimentStatus;
    }
}
| 865 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ProjectRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.model.commons.airavata_commonsConstants;
import org.apache.airavata.model.workspace.Project;
import org.apache.airavata.registry.core.entities.expcatalog.ProjectEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.apache.airavata.registry.cpi.ResultOrderType;
import org.apache.airavata.registry.cpi.utils.Constants;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.*;
public class ProjectRepository extends ExpCatAbstractRepository<Project, ProjectEntity, String> {
    private final static Logger logger = LoggerFactory.getLogger(ProjectRepository.class);

    public ProjectRepository() { super(Project.class, ProjectEntity.class); }

    /** Persists the project and returns its (possibly newly generated) id. */
    protected String saveProjectData(Project project, String gatewayId) throws RegistryException {
        ProjectEntity projectEntity = saveProject(project, gatewayId);
        return projectEntity.getProjectID();
    }

    /**
     * Maps the project to an entity and merges it. Generates an id from the project
     * name when missing, attaches the gateway when the model carries none, and
     * stamps the creation time on first save.
     */
    protected ProjectEntity saveProject(Project project, String gatewayId) throws RegistryException {
        if (project.getProjectID() == null || project.getProjectID().equals(airavata_commonsConstants.DEFAULT_ID)) {
            logger.debug("Setting the Project's ProjectId");
            project.setProjectID(AiravataUtils.getId(project.getName()));
        }
        Mapper mapper = ObjectMapperSingleton.getInstance();
        ProjectEntity projectEntity = mapper.map(project, ProjectEntity.class);
        if (project.getGatewayId() == null) {
            logger.debug("Setting the Project's GatewayId");
            projectEntity.setGatewayId(gatewayId);
        }
        if (!isProjectExist(projectEntity.getProjectID())) {
            logger.debug("Project is new; stamping its creation time");
            projectEntity.setCreationTime(new Timestamp((System.currentTimeMillis())));
        }
        return execute(entityManager -> entityManager.merge(projectEntity));
    }

    public String addProject(Project project, String gatewayId) throws RegistryException {
        return saveProjectData(project, gatewayId);
    }

    /** Forces the given id onto the model, then saves it. */
    public void updateProject(Project project, String projectId) throws RegistryException {
        project.setProjectID(projectId);
        saveProjectData(project, project.getGatewayId());
    }

    public Project getProject(String projectId) throws RegistryException {
        return get(projectId);
    }

    /** Unpaginated variant of {@link #getProjectList(String, Object, int, int, Object, ResultOrderType)}. */
    public List<Project> getProjectList(String fieldName, Object value) throws RegistryException {
        return getProjectList(fieldName, value, -1, 0, null, null);
    }

    /**
     * Lists projects matching the given field. Only OWNER is supported.
     * NOTE(review): orderByIdentifier/resultOrderType are currently ignored here.
     *
     * BUG FIX: the original returned null when no project matched; callers all
     * guard with null-or-empty checks, so returning an empty list is backward
     * compatible and avoids NPEs in new call sites.
     *
     * @throws IllegalArgumentException for any field other than OWNER
     */
    public List<Project> getProjectList(String fieldName, Object value, int limit, int offset,
                                        Object orderByIdentifier, ResultOrderType resultOrderType) throws RegistryException {
        if (!Constants.FieldConstants.ProjectConstants.OWNER.equals(fieldName)) {
            logger.error("Unsupported field name for Project module.");
            throw new IllegalArgumentException("Unsupported field name for Project module.");
        }
        logger.debug("Listing projects by owner");
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.Project.OWNER, value);
        List<Project> projectList = select(QueryConstants.GET_ALL_PROJECTS_FOR_OWNER, limit, offset, queryParameters);
        return (projectList != null) ? projectList : Collections.emptyList();
    }

    /** Convenience projection of {@link #getProjectList(String, Object)} down to project ids. */
    public List<String> getProjectIDs(String fieldName, Object value) throws RegistryException {
        List<Project> projectList = getProjectList(fieldName, value);
        List<String> projectIds = new ArrayList<>();
        if (projectList != null && !projectList.isEmpty()) {
            for (Project project : projectList) {
                projectIds.add(project.getProjectID());
            }
        }
        return projectIds;
    }

    public List<Project> searchProjects(Map<String, String> filters, int limit,
                                        int offset, Object orderByIdentifier, ResultOrderType resultOrderType) throws RegistryException {
        return searchAllAccessibleProjects(null, filters, limit, offset, orderByIdentifier, resultOrderType);
    }

    /**
     * Builds and runs a dynamic JPQL query over the supplied filters.
     * GATEWAY_ID is mandatory; OWNER, PROJECT_NAME and DESCRIPTION are optional
     * LIKE filters. When accessibleProjectIds is non-empty, results are further
     * restricted to those ids; otherwise the trailing " AND " (5 chars) appended
     * by the last filter clause is stripped. Optional ordering by creation time.
     *
     * @throws RegistryException if GATEWAY_ID is missing from the filters
     * @throws IllegalArgumentException for an unsupported filter field
     */
    public List<Project> searchAllAccessibleProjects(List<String> accessibleProjectIds, Map<String, String> filters, int limit,
                                                     int offset, Object orderByIdentifier, ResultOrderType resultOrderType) throws RegistryException {
        String query = "SELECT P FROM " + ProjectEntity.class.getSimpleName() + " P WHERE ";
        Map<String, Object> queryParameters = new HashMap<>();
        if (filters == null || !filters.containsKey(Constants.FieldConstants.ProjectConstants.GATEWAY_ID)) {
            logger.error("GatewayId is required");
            throw new RegistryException("GatewayId is required");
        }
        for (String field : filters.keySet()) {
            if (field.equals(Constants.FieldConstants.ProjectConstants.GATEWAY_ID)) {
                logger.debug("Filter Projects by Gateway ID");
                queryParameters.put(DBConstants.Project.GATEWAY_ID, filters.get(field));
                query += "P.gatewayId LIKE :" + DBConstants.Project.GATEWAY_ID + " AND ";
            }
            else if (field.equals(Constants.FieldConstants.ProjectConstants.OWNER)) {
                logger.debug("Filter Projects by Owner");
                queryParameters.put(DBConstants.Project.OWNER, filters.get(field));
                query += "P.owner LIKE :" + DBConstants.Project.OWNER + " AND ";
            }
            else if (field.equals(Constants.FieldConstants.ProjectConstants.PROJECT_NAME)) {
                logger.debug("Filter Projects by Project Name");
                queryParameters.put(DBConstants.Project.PROJECT_NAME, filters.get(field));
                query += "P.name LIKE :" + DBConstants.Project.PROJECT_NAME + " AND ";
            }
            else if (field.equals(Constants.FieldConstants.ProjectConstants.DESCRIPTION)) {
                logger.debug("Filter Projects by Description");
                queryParameters.put(DBConstants.Project.DESCRIPTION, filters.get(field));
                query += "P.description LIKE :" + DBConstants.Project.DESCRIPTION + " AND ";
            }
            else {
                logger.error("Unsupported field name for Project module.");
                throw new IllegalArgumentException("Unsupported field name for Project module.");
            }
        }
        if (accessibleProjectIds != null && !accessibleProjectIds.isEmpty()) {
            logger.debug("Filter Projects by Accessible Project IDs");
            queryParameters.put(DBConstants.Project.ACCESSIBLE_PROJECT_IDS, accessibleProjectIds);
            query += "P.projectID IN :" + DBConstants.Project.ACCESSIBLE_PROJECT_IDS;
        }
        else {
            logger.debug("Removing the trailing \" AND \" from the query");
            query = query.substring(0, query.length() - 5);
        }
        if (orderByIdentifier != null && resultOrderType != null && orderByIdentifier.equals(Constants.FieldConstants.ProjectConstants.CREATION_TIME)) {
            String order = (resultOrderType == ResultOrderType.ASC) ? "ASC" : "DESC";
            query += " ORDER BY P." + DBConstants.Project.CREATION_TIME + " " + order;
        }
        return select(query, limit, offset, queryParameters);
    }

    public boolean isProjectExist(String projectId) throws RegistryException {
        return isExists(projectId);
    }

    public void removeProject(String projectId) throws RegistryException {
        delete(projectId);
    }
}
| 866 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ProcessRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.commons.airavata_commonsConstants;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.model.scheduling.ComputationalResourceSchedulingModel;
import org.apache.airavata.model.status.ProcessState;
import org.apache.airavata.model.status.ProcessStatus;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ProcessRepository extends ExpCatAbstractRepository<ProcessModel, ProcessEntity, String> {
    private final static Logger logger = LoggerFactory.getLogger(ProcessRepository.class);
    private final TaskRepository taskRepository = new TaskRepository();

    public ProcessRepository() {
        super(ProcessModel.class, ProcessEntity.class);
    }

    /** Persists the given process and returns its (possibly newly generated) id. */
    protected String saveProcessModelData(ProcessModel processModel) throws RegistryException {
        ProcessEntity processEntity = saveProcess(processModel);
        return processEntity.getProcessId();
    }

    /**
     * Maps the process model to an entity and merges it. Generates ids for the
     * process and its statuses when missing, stamps creation/update times, and
     * wires all child entities to the process's primary key before the merge.
     */
    protected ProcessEntity saveProcess(ProcessModel processModel) throws RegistryException {
        if (processModel.getProcessId() == null || processModel.getProcessId().equals(airavata_commonsConstants.DEFAULT_ID)) {
            logger.debug("Setting the Process's ProcessId");
            processModel.setProcessId(ExpCatalogUtils.getID("PROCESS"));
        }
        String processId = processModel.getProcessId();
        if (processModel.getProcessStatuses() != null) {
            logger.debug("Populating the status id of ProcessStatus objects for the Process");
            processModel.getProcessStatuses().forEach(processStatus -> {
                if (processStatus.getStatusId() == null) {
                    processStatus.setStatusId(ExpCatalogUtils.getID("PROCESS_STATE"));
                }
            });
        }
        if (!isProcessExist(processId)) {
            logger.debug("Setting creation time if process doesn't already exist");
            processModel.setCreationTime(System.currentTimeMillis());
        }
        processModel.setLastUpdateTime(System.currentTimeMillis());
        Mapper mapper = ObjectMapperSingleton.getInstance();
        ProcessEntity processEntity = mapper.map(processModel, ProcessEntity.class);
        populateParentIds(processEntity);
        return execute(entityManager -> entityManager.merge(processEntity));
    }

    /**
     * Copies the process's primary key down to its schedule, input, output,
     * status, error and task children (tasks recurse via TaskRepository).
     */
    protected void populateParentIds(ProcessEntity processEntity) {
        String processId = processEntity.getProcessId();
        if (processEntity.getProcessResourceSchedule() != null) {
            logger.debug("Populating the Primary Key of ProcessResourceSchedule objects for the Process");
            processEntity.getProcessResourceSchedule().setProcessId(processId);
        }
        if (processEntity.getProcessInputs() != null) {
            logger.debug("Populating the Primary Key of ProcessInput objects for the Process");
            processEntity.getProcessInputs().forEach(processInputEntity -> processInputEntity.setProcessId(processId));
        }
        if (processEntity.getProcessOutputs() != null) {
            logger.debug("Populating the Primary Key of ProcessOutput objects for the Process");
            processEntity.getProcessOutputs().forEach(processOutputEntity -> processOutputEntity.setProcessId(processId));
        }
        if (processEntity.getProcessStatuses() != null) {
            logger.debug("Populating the Primary Key of ProcessStatus objects for the Process");
            processEntity.getProcessStatuses().forEach(processStatusEntity -> processStatusEntity.setProcessId(processId));
        }
        if (processEntity.getProcessErrors() != null) {
            logger.debug("Populating the Primary Key of ProcessError objects for the Process");
            processEntity.getProcessErrors().forEach(processErrorEntity -> processErrorEntity.setProcessId(processId));
        }
        if (processEntity.getTasks() != null) {
            logger.debug("Populating the Primary Key of Task objects for the Process");
            processEntity.getTasks().forEach(taskEntity -> {
                taskEntity.setParentProcessId(processId);
                taskRepository.populateParentIds(taskEntity);
            });
        }
    }

    /** Attaches the process to the experiment, seeds a CREATED status, and saves it. */
    public String addProcess(ProcessModel process, String experimentId) throws RegistryException {
        process.setExperimentId(experimentId);
        ProcessStatus processStatus = new ProcessStatus(ProcessState.CREATED);
        process.addToProcessStatuses(processStatus);
        return saveProcessModelData(process);
    }

    public void updateProcess(ProcessModel updatedProcess, String processId) throws RegistryException {
        saveProcessModelData(updatedProcess);
    }

    /** Fetches a process by id (no need for a fresh repository instance; get() is inherited). */
    public ProcessModel getProcess(String processId) throws RegistryException {
        return get(processId);
    }

    /** Attaches the given resource schedule to the process and saves it. */
    public String addProcessResourceSchedule(ComputationalResourceSchedulingModel computationalResourceSchedulingModel, String processId) throws RegistryException {
        ProcessModel processModel = getProcess(processId);
        processModel.setProcessResourceSchedule(computationalResourceSchedulingModel);
        updateProcess(processModel, processId);
        return processId;
    }

    /** Add and update share the same merge semantics. */
    public String updateProcessResourceSchedule(ComputationalResourceSchedulingModel computationalResourceSchedulingModel, String processId) throws RegistryException {
        return addProcessResourceSchedule(computationalResourceSchedulingModel, processId);
    }

    public ComputationalResourceSchedulingModel getProcessResourceSchedule(String processId) throws RegistryException {
        return getProcess(processId).getProcessResourceSchedule();
    }

    /**
     * Lists processes matching the given field. Only EXPERIMENT_ID is supported.
     *
     * @throws IllegalArgumentException for any other field name
     */
    public List<ProcessModel> getProcessList(String fieldName, Object value) throws RegistryException {
        if (DBConstants.Process.EXPERIMENT_ID.equals(fieldName)) {
            logger.debug("Search criteria is ExperimentId");
            Map<String, Object> queryParameters = new HashMap<>();
            queryParameters.put(DBConstants.Process.EXPERIMENT_ID, value);
            return select(QueryConstants.GET_PROCESS_FOR_EXPERIMENT_ID, -1, 0, queryParameters);
        }
        logger.error("Unsupported field name for Process module.");
        throw new IllegalArgumentException("Unsupported field name for Process module.");
    }

    /** Convenience projection of {@link #getProcessList} down to process ids. */
    public List<String> getProcessIds(String fieldName, Object value) throws RegistryException {
        List<String> processIds = new ArrayList<>();
        for (ProcessModel processModel : getProcessList(fieldName, value)) {
            processIds.add(processModel.getProcessId());
        }
        return processIds;
    }

    public boolean isProcessExist(String processId) throws RegistryException {
        return isExists(processId);
    }

    public void removeProcess(String processId) throws RegistryException {
        delete(processId);
    }

    /** Pages through all processes. */
    public List<ProcessModel> getAllProcesses(int offset, int limit) {
        return select(QueryConstants.GET_ALL_PROCESSES, limit, offset, new HashMap<>());
    }

    /**
     * Returns average time distributions (orchestrator, queued, helix) for the
     * gateway within the given search window, keyed by the DBConstants.MetaData
     * names. Entries are omitted when the native query yields no value.
     */
    public Map<String, Double> getAVGTimeDistribution(String gatewayId, double searchTime) {
        Map<String, Double> timeDistributions = new HashMap<>();
        List<Object> orchTimeList = selectWithNativeQuery(QueryConstants.FIND_AVG_TIME_UPTO_METASCHEDULER_NATIVE_QUERY,
                gatewayId, String.valueOf(searchTime));
        List<Object> queueingTimeList = selectWithNativeQuery(QueryConstants.FIND_AVG_TIME_QUEUED_NATIVE_QUERY,
                gatewayId, String.valueOf(searchTime));
        List<Object> helixTimeList = selectWithNativeQuery(QueryConstants.FIND_AVG_TIME_HELIX_NATIVE_QUERY,
                gatewayId, String.valueOf(searchTime));
        if (!orchTimeList.isEmpty() && orchTimeList.get(0) != null) {
            timeDistributions.put(DBConstants.MetaData.ORCH_TIME, ((BigDecimal) orchTimeList.get(0)).doubleValue());
        }
        if (!queueingTimeList.isEmpty() && queueingTimeList.get(0) != null) {
            timeDistributions.put(DBConstants.MetaData.QUEUED_TIME, ((BigDecimal) queueingTimeList.get(0)).doubleValue());
        }
        if (!helixTimeList.isEmpty() && helixTimeList.get(0) != null) {
            timeDistributions.put(DBConstants.MetaData.HELIX, ((BigDecimal) helixTimeList.get(0)).doubleValue());
        }
        return timeDistributions;
    }
}
| 867 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/TaskStatusRepository.java | package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.status.TaskState;
import org.apache.airavata.model.status.TaskStatus;
import org.apache.airavata.model.task.TaskModel;
import org.apache.airavata.registry.core.entities.expcatalog.TaskStatusEntity;
import org.apache.airavata.registry.core.entities.expcatalog.TaskStatusPK;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.List;
public class TaskStatusRepository extends ExpCatAbstractRepository<TaskStatus, TaskStatusEntity, TaskStatusPK> {
    private final static Logger logger = LoggerFactory.getLogger(TaskStatusRepository.class);

    public TaskStatusRepository() { super(TaskStatus.class, TaskStatusEntity.class); }

    /** Merges the given status for the task and returns the status id. */
    protected String saveTaskStatus(TaskStatus taskStatus, String taskId) throws RegistryException {
        Mapper mapper = ObjectMapperSingleton.getInstance();
        TaskStatusEntity taskStatusEntity = mapper.map(taskStatus, TaskStatusEntity.class);
        if (taskStatusEntity.getTaskId() == null) {
            logger.debug("Setting the TaskStatusEntity's TaskId");
            taskStatusEntity.setTaskId(taskId);
        }
        execute(entityManager -> entityManager.merge(taskStatusEntity));
        return taskStatusEntity.getStatusId();
    }

    /** Adds a new status record, generating a status id when missing. */
    public String addTaskStatus(TaskStatus taskStatus, String taskId) throws RegistryException {
        if (taskStatus.getStatusId() == null) {
            logger.debug("Setting the TaskStatus's StatusId");
            taskStatus.setStatusId(ExpCatalogUtils.getID("TASK_STATE"));
        }
        return saveTaskStatus(taskStatus, taskId);
    }

    public String updateTaskStatus(TaskStatus updatedTaskStatus, String taskId) throws RegistryException {
        return saveTaskStatus(updatedTaskStatus, taskId);
    }

    /**
     * Returns the latest status of the task, or null if it has none.
     * Latest = strictly newest time-of-state-change; on a tie, a terminal state
     * (COMPLETED/FAILED/CANCELED) wins.
     *
     * BUG FIX: the original tie-break compared a Timestamp to a boxed Long via
     * equals() (always false) and a TaskState enum to a String via equals()
     * (also always false), so the terminal-state tie-break could never fire. The
     * comparison now uses the raw long timestamps and enum identity.
     */
    public TaskStatus getTaskStatus(String taskId) throws RegistryException {
        TaskRepository taskRepository = new TaskRepository();
        TaskModel taskModel = taskRepository.getTask(taskId);
        List<TaskStatus> taskStatusList = taskModel.getTaskStatuses();
        if (taskStatusList == null || taskStatusList.isEmpty()) {
            logger.debug("TaskStatus list is empty");
            return null;
        }
        TaskStatus latestTaskStatus = taskStatusList.get(0);
        for (int i = 1; i < taskStatusList.size(); i++) {
            TaskStatus candidate = taskStatusList.get(i);
            long candidateTime = candidate.getTimeOfStateChange();
            long latestTime = latestTaskStatus.getTimeOfStateChange();
            boolean candidateIsTerminal = candidate.getState() == TaskState.COMPLETED
                    || candidate.getState() == TaskState.FAILED
                    || candidate.getState() == TaskState.CANCELED;
            if (candidateTime > latestTime || (candidateTime == latestTime && candidateIsTerminal)) {
                latestTaskStatus = candidate;
            }
        }
        return latestTaskStatus;
    }
}
| 868 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/UserRepository.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.user.UserProfile;
import org.apache.airavata.registry.core.entities.expcatalog.UserEntity;
import org.apache.airavata.registry.core.entities.expcatalog.UserPK;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
public class UserRepository extends ExpCatAbstractRepository<UserProfile, UserEntity, UserPK> {

    public UserRepository() {
        super(UserProfile.class, UserEntity.class);
    }

    /**
     * Persists a new user profile.
     *
     * @return the created profile
     * @throws RegistryException wrapping any persistence failure
     */
    public UserProfile addUser(UserProfile user) throws RegistryException {
        try {
            return create(user);
        } catch (Exception e) {
            throw new RegistryException("Failed to create user", e);
        }
    }

    /**
     * Returns true when a user with the given username exists in the gateway.
     *
     * @throws RegistryException wrapping any persistence failure
     */
    public boolean isUserExists(String gatewayId, String username) throws RegistryException {
        try {
            return isExists(new UserPK(gatewayId, username));
        } catch (Exception e) {
            // Bug fix: this message previously read "Failed to create user",
            // copy-pasted from addUser, which misreported the failing operation.
            throw new RegistryException("Failed to check if user exists", e);
        }
    }

    /**
     * Returns the user ids of all users registered under the given gateway.
     *
     * @throws RegistryException wrapping any persistence failure
     */
    public List<String> getAllUsernamesInGateway(String gatewayId) throws RegistryException {
        try {
            List<UserProfile> users = select(QueryConstants.GET_ALL_GATEWAY_USERS, -1, 0,
                    Collections.singletonMap(DBConstants.User.GATEWAY_ID, gatewayId));
            return users.stream().map(UserProfile::getUserId).collect(Collectors.toList());
        } catch (Exception e) {
            // Bug fix: message previously misreported this operation as "create user".
            throw new RegistryException("Failed to get all usernames in gateway", e);
        }
    }
}
| 869 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/QueueStatusRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.status.QueueStatusModel;
import org.apache.airavata.registry.core.entities.expcatalog.QueueStatusEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
public class QueueStatusRepository extends ExpCatAbstractRepository<QueueStatusModel, QueueStatusEntity, String> {
    private final static Logger logger = LoggerFactory.getLogger(QueueStatusRepository.class);

    public QueueStatusRepository() {
        super(QueueStatusModel.class, QueueStatusEntity.class);
    }

    /**
     * Merges every given queue status model into the catalog.
     *
     * @return always true (kept for interface compatibility)
     * @throws RegistryException on persistence failure
     */
    public boolean createQueueStatuses(List<QueueStatusModel> queueStatusModels) throws RegistryException {
        // Hoisted out of the loop: the mapper is a singleton and loop-invariant.
        Mapper mapper = ObjectMapperSingleton.getInstance();
        for (QueueStatusModel queueStatusModel : queueStatusModels) {
            QueueStatusEntity queueStatusEntity = mapper.map(queueStatusModel, QueueStatusEntity.class);
            execute(entityManager -> entityManager.merge(queueStatusEntity));
        }
        return true;
    }

    /**
     * Returns the queue status models selected by GET_ALL_QUEUE_STATUS_MODELS
     * (limit 0 is passed through to select as before).
     */
    public List<QueueStatusModel> getLatestQueueStatuses() throws RegistryException {
        return select(QueryConstants.GET_ALL_QUEUE_STATUS_MODELS, 0);
    }

    /**
     * Looks up the status of a single queue on a host.
     *
     * @return the first matching model, or empty when none matches
     */
    public Optional<QueueStatusModel> getQueueStatus(String hostName, String queueName) throws RegistryException {
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.QueueStatus.HOST_NAME, hostName);
        queryParameters.put(DBConstants.QueueStatus.QUEUE_NAME, queueName);
        List<QueueStatusModel> queueStatusModels = select(QueryConstants.FIND_QUEUE_STATUS, 1, 0, queryParameters);
        if (queueStatusModels != null && !queueStatusModels.isEmpty()) {
            return Optional.of(queueStatusModels.get(0));
        }
        return Optional.empty();
    }
}
| 870 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/JobStatusRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.job.JobModel;
import org.apache.airavata.model.status.JobState;
import org.apache.airavata.model.status.JobStatus;
import org.apache.airavata.registry.core.entities.expcatalog.JobPK;
import org.apache.airavata.registry.core.entities.expcatalog.JobStatusEntity;
import org.apache.airavata.registry.core.entities.expcatalog.JobStatusPK;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ExpCatalogUtils;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class JobStatusRepository extends ExpCatAbstractRepository<JobStatus, JobStatusEntity, JobStatusPK> {
    private final static Logger logger = LoggerFactory.getLogger(JobStatusRepository.class);

    public JobStatusRepository() { super(JobStatus.class, JobStatusEntity.class); }

    /**
     * Maps the status to its entity form, backfills the job/task ids from the
     * primary key when the mapper left them unset, and merges the entity.
     *
     * @return the status id of the persisted entity
     * @throws RegistryException on persistence failure
     */
    protected String saveJobStatus(JobStatus jobStatus, JobPK jobPK) throws RegistryException {
        Mapper mapper = ObjectMapperSingleton.getInstance();
        JobStatusEntity jobStatusEntity = mapper.map(jobStatus, JobStatusEntity.class);
        if (jobStatusEntity.getJobId() == null) {
            logger.debug("Setting the JobStatusEntity's JobId");
            jobStatusEntity.setJobId(jobPK.getJobId());
        }
        if (jobStatusEntity.getTaskId() == null) {
            logger.debug("Setting the JobStatusEntity's TaskId");
            jobStatusEntity.setTaskId(jobPK.getTaskId());
        }
        execute(entityManager -> entityManager.merge(jobStatusEntity));
        return jobStatusEntity.getStatusId();
    }

    /**
     * Persists a new status for the given job, generating a status id when absent.
     */
    public String addJobStatus(JobStatus jobStatus, JobPK jobPK) throws RegistryException {
        if (jobStatus.getStatusId() == null) {
            logger.debug("Setting the JobStatusEntity's StatusId");
            jobStatus.setStatusId(ExpCatalogUtils.getID("JOB_STATE"));
        }
        return saveJobStatus(jobStatus, jobPK);
    }

    /** Persists an updated status for the given job. */
    public String updateJobStatus(JobStatus updatedJobStatus, JobPK jobPK) throws RegistryException {
        return saveJobStatus(updatedJobStatus, jobPK);
    }

    /**
     * Returns the latest status of the job, or null when it has none.
     * The latest status has the greatest time-of-state-change; on a timestamp
     * tie a terminal state (COMPLETE, FAILED, CANCELED) wins.
     *
     * @throws RegistryException if the job cannot be loaded
     */
    public JobStatus getJobStatus(JobPK jobPK) throws RegistryException {
        JobRepository jobRepository = new JobRepository();
        JobModel jobModel = jobRepository.getJob(jobPK);
        List<JobStatus> jobStatusList = jobModel.getJobStatuses();
        if (jobStatusList.isEmpty()) {
            logger.debug("JobStatus list is empty");
            return null;
        }
        JobStatus latestJobStatus = jobStatusList.get(0);
        for (int i = 1; i < jobStatusList.size(); i++) {
            JobStatus candidate = jobStatusList.get(i);
            long candidateTime = candidate.getTimeOfStateChange();
            long latestTime = latestJobStatus.getTimeOfStateChange();
            // Bug fix: the previous tie-break compared a Timestamp against a raw long
            // (Timestamp.equals(Long) is always false) and a JobState enum against a
            // String via equals(Object) (also always false), so a timestamp tie could
            // never actually favor a terminal state.
            boolean candidateIsTerminal = candidate.getJobState() == JobState.COMPLETE
                    || candidate.getJobState() == JobState.FAILED
                    || candidate.getJobState() == JobState.CANCELED;
            if (candidateTime > latestTime || (candidateTime == latestTime && candidateIsTerminal)) {
                latestJobStatus = candidate;
            }
        }
        return latestJobStatus;
    }

    /**
     * Runs the FIND_JOB_COUNT_NATIVE_QUERY for the given gateway/status/time.
     */
    public List<JobStatus> getDistinctListofJobStatus(String gatewayId, String status, double time) {
        // Simplification: run the native query on this repository instead of
        // allocating a fresh JobStatusRepository instance for no reason.
        return selectWithNativeQuery(QueryConstants.FIND_JOB_COUNT_NATIVE_QUERY,
                gatewayId, status, String.valueOf(time));
    }
}
| 871 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ProcessWorkflowRepository.java | package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.model.process.ProcessModel;
import org.apache.airavata.model.process.ProcessWorkflow;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessWorkflowEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessWorkflowPK;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.RegistryException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.List;
public class ProcessWorkflowRepository extends ExpCatAbstractRepository<ProcessWorkflow, ProcessWorkflowEntity, ProcessWorkflowPK> {
    // Bug fix: the logger was previously created with ProcessInputRepository.class
    // (copy-paste error), so log lines were attributed to the wrong class.
    private final static Logger logger = LoggerFactory.getLogger(ProcessWorkflowRepository.class);

    public ProcessWorkflowRepository() {
        super(ProcessWorkflow.class, ProcessWorkflowEntity.class);
    }

    /**
     * Maps each workflow to its entity form, backfills the process id when the
     * mapper left it unset, and merges each entity.
     *
     * @throws RegistryException on persistence failure
     */
    protected void saveProcessWorkflow(List<ProcessWorkflow> processWorkflows, String processId) throws RegistryException {
        // Hoisted out of the loop: the mapper is a singleton and loop-invariant.
        Mapper mapper = ObjectMapperSingleton.getInstance();
        for (ProcessWorkflow processWorkflow : processWorkflows) {
            ProcessWorkflowEntity processWorkflowEntity = mapper.map(processWorkflow, ProcessWorkflowEntity.class);
            if (processWorkflowEntity.getProcessId() == null) {
                logger.debug("Setting the ProcessWorkflowEntity's ProcessId");
                processWorkflowEntity.setProcessId(processId);
            }
            execute(entityManager -> entityManager.merge(processWorkflowEntity));
        }
    }

    /**
     * Persists a single workflow for the process and returns the process id.
     */
    public String addProcessWorkflow(ProcessWorkflow processWorkflow, String processId) throws RegistryException {
        saveProcessWorkflow(Collections.singletonList(processWorkflow), processId);
        return processId;
    }

    /** Persists a batch of workflows for the process. */
    public void addProcessWorkflows(List<ProcessWorkflow> processWorkflows, String processId) throws RegistryException {
        saveProcessWorkflow(processWorkflows, processId);
    }

    /**
     * Loads the process and returns its workflows.
     *
     * @throws RegistryException if the process cannot be loaded
     */
    public List<ProcessWorkflow> getProcessWorkflows(String processId) throws RegistryException {
        ProcessRepository processRepository = new ProcessRepository();
        ProcessModel processModel = processRepository.getProcess(processId);
        return processModel.getProcessWorkflows();
    }
}
| 872 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/expcatalog/ExperimentRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.expcatalog;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.model.experiment.ExperimentModel;
import org.apache.airavata.model.experiment.UserConfigurationDataModel;
import org.apache.airavata.model.status.ExperimentState;
import org.apache.airavata.model.status.ExperimentStatus;
import org.apache.airavata.registry.core.entities.expcatalog.ComputationalResourceSchedulingEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ExperimentEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessInputEntity;
import org.apache.airavata.registry.core.entities.expcatalog.ProcessOutputEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.RegistryException;
import org.apache.airavata.registry.cpi.ResultOrderType;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ExperimentRepository extends ExpCatAbstractRepository<ExperimentModel, ExperimentEntity, String> {
    private final static Logger logger = LoggerFactory.getLogger(ExperimentRepository.class);

    public ExperimentRepository() {
        super(ExperimentModel.class, ExperimentEntity.class);
    }

    /** Persists the experiment and returns its experiment id. */
    protected String saveExperimentModelData(ExperimentModel experimentModel) throws RegistryException {
        ExperimentEntity experimentEntity = saveExperiment(experimentModel);
        return experimentEntity.getExperimentId();
    }

    /**
     * Maps the model to an entity, propagating the experiment id into every
     * child object (statuses, processes, inputs, outputs, errors, user
     * configuration data), backfilling status ids and the creation time for
     * brand-new experiments, then merges the entity.
     *
     * @throws RegistryException on persistence failure
     */
    protected ExperimentEntity saveExperiment(ExperimentModel experimentModel) throws RegistryException {
        String experimentId = experimentModel.getExperimentId();
        if (experimentModel.getExperimentStatus() != null) {
            logger.debug("Populating the status id of ExperimentStatus objects for the Experiment");
            experimentModel.getExperimentStatus().forEach(experimentStatusEntity -> {
                if (experimentStatusEntity.getStatusId() == null) {
                    experimentStatusEntity.setStatusId(AiravataUtils.getId("EXPERIMENT_STATE"));
                }
            });
        }
        if (experimentModel.getProcesses() != null) {
            logger.debug("Populating the Process objects' Experiment ID for the Experiment");
            experimentModel.getProcesses().forEach(processModel -> processModel.setExperimentId(experimentId));
        }
        if (!isExperimentExist(experimentId)) {
            logger.debug("Populating creation time if it doesn't already exist");
            experimentModel.setCreationTime(System.currentTimeMillis());
        }
        Mapper mapper = ObjectMapperSingleton.getInstance();
        ExperimentEntity experimentEntity = mapper.map(experimentModel, ExperimentEntity.class);
        if (experimentEntity.getUserConfigurationData() != null) {
            logger.debug("Populating the Primary Key of UserConfigurationData object for the Experiment");
            experimentEntity.getUserConfigurationData().setExperimentId(experimentId);
            // Bug fix: the auto-scheduled scheduling list was previously dereferenced
            // outside this null check, throwing a NullPointerException for experiments
            // that have no user configuration data.
            if (experimentEntity.getUserConfigurationData().getAutoScheduledCompResourceSchedulingList() != null) {
                logger.debug("Populating the Primary Key of UserConfigurationData.ComputationalResourceSchedulingEntities object for the Experiment");
                for (ComputationalResourceSchedulingEntity entity : experimentEntity.getUserConfigurationData().getAutoScheduledCompResourceSchedulingList()) {
                    entity.setExperimentId(experimentId);
                }
            }
        }
        if (experimentEntity.getExperimentInputs() != null) {
            logger.debug("Populating the Primary Key of ExperimentInput objects for the Experiment");
            experimentEntity.getExperimentInputs().forEach(experimentInputEntity -> experimentInputEntity.setExperimentId(experimentId));
        }
        if (experimentEntity.getExperimentOutputs() != null) {
            logger.debug("Populating the Primary Key of ExperimentOutput objects for the Experiment");
            experimentEntity.getExperimentOutputs().forEach(experimentOutputEntity -> experimentOutputEntity.setExperimentId(experimentId));
        }
        if (experimentEntity.getExperimentStatus() != null) {
            logger.debug("Populating the Primary Key of ExperimentStatus objects for the Experiment");
            experimentEntity.getExperimentStatus().forEach(experimentStatusEntity -> experimentStatusEntity.setExperimentId(experimentId));
        }
        if (experimentEntity.getErrors() != null) {
            logger.debug("Populating the Primary Key of ExperimentError objects for the Experiment");
            experimentEntity.getErrors().forEach(experimentErrorEntity -> experimentErrorEntity.setExperimentId(experimentId));
        }
        return execute(entityManager -> entityManager.merge(experimentEntity));
    }

    /**
     * Creates a new experiment: attaches an initial CREATED status, derives a
     * unique experiment id from the (truncated) experiment name, and persists it.
     *
     * @return the generated experiment id
     */
    public String addExperiment(ExperimentModel experimentModel) throws RegistryException {
        ExperimentStatus experimentStatus = new ExperimentStatus();
        experimentStatus.setState(ExperimentState.CREATED);
        experimentStatus.setTimeOfStateChange(AiravataUtils.getCurrentTimestamp().getTime());
        experimentModel.addToExperimentStatus(experimentStatus);
        String expName = experimentModel.getExperimentName();
        // This is to avoid overflow of experiment id size. Total experiment id length is <= 50 + UUID
        experimentModel.setExperimentId(AiravataUtils.getId(expName.substring(0, Math.min(expName.length(), 50))));
        return saveExperimentModelData(experimentModel);
    }

    /** Persists the updated experiment (the id comes from the model itself). */
    public void updateExperiment(ExperimentModel updatedExperimentModel, String experimentId) throws RegistryException {
        saveExperimentModelData(updatedExperimentModel);
    }

    /** Loads the experiment with the given id. */
    public ExperimentModel getExperiment(String experimentId) throws RegistryException {
        return get(experimentId);
    }

    /**
     * Replaces the experiment's user configuration data and persists it.
     *
     * @return the experiment id
     */
    public String addUserConfigurationData(UserConfigurationDataModel userConfigurationDataModel, String experimentId) throws RegistryException {
        ExperimentModel experimentModel = getExperiment(experimentId);
        experimentModel.setUserConfigurationData(userConfigurationDataModel);
        updateExperiment(experimentModel, experimentId);
        return experimentId;
    }

    /** Alias of addUserConfigurationData: add and update share the same path. */
    public String updateUserConfigurationData(UserConfigurationDataModel updatedUserConfigurationDataModel, String experimentId) throws RegistryException {
        return addUserConfigurationData(updatedUserConfigurationDataModel, experimentId);
    }

    /** Returns the experiment's user configuration data (may be null). */
    public UserConfigurationDataModel getUserConfigurationData(String experimentId) throws RegistryException {
        ExperimentModel experimentModel = getExperiment(experimentId);
        return experimentModel.getUserConfigurationData();
    }

    /**
     * Lists experiments in a gateway filtered by user name or project id.
     * Note: orderByIdentifier and resultOrderType are accepted for interface
     * compatibility but are not applied by this implementation.
     *
     * @throws IllegalArgumentException for any other field name
     */
    public List<ExperimentModel> getExperimentList(String gatewayId, String fieldName, Object value, int limit, int offset,
                                                   Object orderByIdentifier, ResultOrderType resultOrderType) throws RegistryException {
        List<ExperimentModel> experimentModelList;
        if (fieldName.equals(DBConstants.Experiment.USER_NAME)) {
            logger.debug("Search criteria is Username");
            Map<String, Object> queryParameters = new HashMap<>();
            queryParameters.put(DBConstants.Experiment.USER_NAME, value);
            queryParameters.put(DBConstants.Experiment.GATEWAY_ID, gatewayId);
            experimentModelList = select(QueryConstants.GET_EXPERIMENTS_FOR_USER, limit, offset, queryParameters);
        }
        else if (fieldName.equals(DBConstants.Experiment.PROJECT_ID)) {
            logger.debug("Search criteria is ProjectId");
            Map<String, Object> queryParameters = new HashMap<>();
            queryParameters.put(DBConstants.Experiment.PROJECT_ID, value);
            queryParameters.put(DBConstants.Experiment.GATEWAY_ID, gatewayId);
            experimentModelList = select(QueryConstants.GET_EXPERIMENTS_FOR_PROJECT_ID, limit, offset, queryParameters);
        }
        else {
            logger.error("Unsupported field name for Experiment module.");
            throw new IllegalArgumentException("Unsupported field name for Experiment module.");
        }
        return experimentModelList;
    }

    /** Returns true when an experiment with the given id exists. */
    public boolean isExperimentExist(String experimentId) throws RegistryException {
        return isExists(experimentId);
    }

    /** Deletes the experiment with the given id. */
    public void removeExperiment(String experimentId) throws RegistryException {
        delete(experimentId);
    }
}
| 873 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/replicacatalog/DataProductRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.replicacatalog;
import org.apache.airavata.model.data.replica.DataProductModel;
import org.apache.airavata.model.data.replica.DataProductType;
import org.apache.airavata.registry.core.entities.replicacatalog.DataProductEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.DataProductInterface;
import org.apache.airavata.registry.cpi.ReplicaCatalogException;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.*;
public class DataProductRepository extends RepCatAbstractRepository<DataProductModel, DataProductEntity, String> implements DataProductInterface {
    private final static Logger logger = LoggerFactory.getLogger(DataProductRepository.class);
    private final static DataReplicaLocationRepository dataReplicaLocationRepository = new DataReplicaLocationRepository();

    public DataProductRepository() {
        super(DataProductModel.class, DataProductEntity.class);
    }

    // Persists the model and returns its (possibly newly generated) product URI.
    protected String saveDataProductModelData(DataProductModel dataProductModel) throws ReplicaCatalogException {
        DataProductEntity dataProductEntity = saveDataProduct(dataProductModel);
        return dataProductEntity.getProductUri();
    }

    // Core save path shared by register and update:
    //  1. generates a product URI for new products,
    //  2. validates owner/gateway and, when a parent is referenced, that the
    //     parent exists and is a COLLECTION,
    //  3. sets creation time only on first save, propagates the product URI and
    //     timestamps into every replica location, then merges the entity.
    protected DataProductEntity saveDataProduct(DataProductModel dataProductModel) throws ReplicaCatalogException {
        if (dataProductModel.getProductUri() == null) {
            logger.debug("Setting the Product URI for the new Data Product");
            dataProductModel.setProductUri(DataProductInterface.schema + "://" + UUID.randomUUID().toString());
        }
        String productUri = dataProductModel.getProductUri();
        Mapper mapper = ObjectMapperSingleton.getInstance();
        DataProductEntity dataProductEntity = mapper.map(dataProductModel, DataProductEntity.class);
        if (dataProductEntity.getOwnerName() == null || dataProductEntity.getGatewayId() == null) {
            logger.error("Owner name and/or gateway ID is empty");
            throw new ReplicaCatalogException("Owner name and gateway ID should not be empty");
        }
        // A parent reference is only valid when the parent exists and is a collection.
        if (dataProductEntity.getParentProductUri() != null && (!isExists(dataProductEntity.getParentProductUri())
                || !getDataProduct(dataProductEntity.getParentProductUri()).getDataProductType().equals(DataProductType.COLLECTION))) {
            logger.error("Parent product does not exist and/or parent type is not Collection");
            throw new ReplicaCatalogException("Parent product does not exist or parent type is not Collection");
        }
        // One timestamp is shared by the product and all of its replica locations.
        final Timestamp currentTime = new Timestamp(System.currentTimeMillis());
        // Creation time is only set the first time this product is saved.
        if (!isDataProductExists(productUri)) {
            logger.debug("Checking if the Data Product already exists");
            dataProductEntity.setCreationTime(currentTime);
        }
        if (dataProductEntity.getReplicaLocations() != null) {
            logger.debug("Populating the product URI for ReplicaLocations objects for the Data Product");
            dataProductEntity.getReplicaLocations().forEach(dataReplicaLocationEntity -> {
                dataReplicaLocationEntity.setProductUri(productUri);
                if (dataReplicaLocationEntity.getReplicaId() == null) {
                    dataReplicaLocationEntity.setReplicaId(UUID.randomUUID().toString());
                }
                // Like the product itself, a replica's creation time is set only
                // when it is first persisted.
                if (!dataReplicaLocationRepository.isExists(dataReplicaLocationEntity.getReplicaId())){
                    dataReplicaLocationEntity.setCreationTime(currentTime);
                }
                dataReplicaLocationEntity.setLastModifiedTime(currentTime);
            });
        }
        dataProductEntity.setLastModifiedTime(currentTime);
        return execute(entityManager -> entityManager.merge(dataProductEntity));
    }

    @Override
    public String registerDataProduct(DataProductModel dataProductModel) throws ReplicaCatalogException {
        return saveDataProductModelData(dataProductModel);
    }

    @Override
    public boolean updateDataProduct(DataProductModel updatedDataProductModel) throws ReplicaCatalogException {
        return (saveDataProductModelData(updatedDataProductModel) != null);
    }

    @Override
    public DataProductModel getDataProduct(String productUri) throws ReplicaCatalogException {
        return get(productUri);
    }

    // NOTE(review): if the product has no parent, this calls get(null) —
    // behavior in that case depends on AbstractRepository.get; confirm callers
    // only invoke this on child products.
    @Override
    public DataProductModel getParentDataProduct(String productUri) throws ReplicaCatalogException {
        DataProductModel dataProductModel = getDataProduct(productUri);
        return get(dataProductModel.getParentProductUri());
    }

    // Returns all products whose parent URI matches (no paging: limit -1).
    @Override
    public List<DataProductModel> getChildDataProducts(String parentProductUri) throws ReplicaCatalogException {
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.DataProduct.PARENT_PRODUCT_URI, parentProductUri);
        List<DataProductModel> dataProductModelList = select(QueryConstants.FIND_ALL_CHILD_DATA_PRODUCTS, -1, 0, queryParameters);
        return dataProductModelList;
    }

    // Paged name search scoped to a gateway and owner.
    @Override
    public List<DataProductModel> searchDataProductsByName(String gatewayId, String userId, String productName,
                                                           int limit, int offset) throws ReplicaCatalogException {
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.DataProduct.GATEWAY_ID, gatewayId);
        queryParameters.put(DBConstants.DataProduct.OWNER_NAME, userId);
        queryParameters.put(DBConstants.DataProduct.PRODUCT_NAME, productName);
        List<DataProductModel> dataProductModelList = select(QueryConstants.FIND_DATA_PRODUCT_BY_NAME, limit, offset, queryParameters);
        return dataProductModelList;
    }

    @Override
    public boolean isDataProductExists(String productUri) throws ReplicaCatalogException {
        return isExists(productUri);
    }

    @Override
    public boolean removeDataProduct(String productUri) throws ReplicaCatalogException {
        return delete(productUri);
    }
}
| 874 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/replicacatalog/RepCatAbstractRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.replicacatalog;
import org.apache.airavata.registry.core.repositories.AbstractRepository;
import org.apache.airavata.registry.core.utils.JPAUtil.RepCatalogJPAUtils;
import javax.persistence.EntityManager;
/**
 * Base repository for replica-catalog entities: inherits all CRUD behavior from
 * AbstractRepository and only binds the entity manager to the replica-catalog
 * persistence unit.
 *
 * @param <T>  Thrift data model type
 * @param <E>  JPA entity type
 * @param <Id> entity primary-key type
 */
public class RepCatAbstractRepository<T, E, Id> extends AbstractRepository<T, E, Id> {

    public RepCatAbstractRepository(Class<T> thriftGenericClass, Class<E> dbEntityGenericClass) {
        super(thriftGenericClass, dbEntityGenericClass);
    }

    /** Supplies the replica-catalog entity manager to the base CRUD operations. */
    @Override
    protected EntityManager getEntityManager() {
        return RepCatalogJPAUtils.getEntityManager();
    }
}
| 875 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/replicacatalog/DataReplicaLocationRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.replicacatalog;
import org.apache.airavata.model.data.replica.DataProductModel;
import org.apache.airavata.model.data.replica.DataReplicaLocationModel;
import org.apache.airavata.registry.core.entities.replicacatalog.DataReplicaLocationEntity;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.cpi.DataReplicaLocationInterface;
import org.apache.airavata.registry.cpi.ReplicaCatalogException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.List;
import java.util.UUID;
/**
 * Repository for data replica locations in the replica catalog. Handles ID
 * generation, creation/modification timestamps, and lookups by replica ID or
 * by owning data product.
 */
public class DataReplicaLocationRepository extends RepCatAbstractRepository<DataReplicaLocationModel, DataReplicaLocationEntity, String> implements DataReplicaLocationInterface {
    private final static Logger logger = LoggerFactory.getLogger(DataReplicaLocationRepository.class);

    public DataReplicaLocationRepository() {
        super(DataReplicaLocationModel.class, DataReplicaLocationEntity.class);
    }

    /**
     * Persists the given replica location and returns its replica ID.
     *
     * @param dataReplicaLocationModel model to persist (replica ID generated when absent)
     * @return the replica ID of the saved record
     * @throws ReplicaCatalogException on persistence failure
     */
    private String saveDataReplicaLocationModelData(DataReplicaLocationModel dataReplicaLocationModel) throws ReplicaCatalogException {
        DataReplicaLocationEntity dataReplicaLocationEntity = saveDataReplicaLocation(dataReplicaLocationModel);
        return dataReplicaLocationEntity.getReplicaId();
    }

    /**
     * Maps the model to its JPA entity, stamps timestamps, and merges it.
     * A random UUID is assigned as the replica ID when the model has none.
     */
    private DataReplicaLocationEntity saveDataReplicaLocation(DataReplicaLocationModel dataReplicaLocationModel) throws ReplicaCatalogException {
        if (dataReplicaLocationModel.getReplicaId() == null) {
            logger.debug("Setting the Replica ID for the new Data Replica Location");
            dataReplicaLocationModel.setReplicaId(UUID.randomUUID().toString());
        }
        String replicaId = dataReplicaLocationModel.getReplicaId();
        Mapper mapper = ObjectMapperSingleton.getInstance();
        DataReplicaLocationEntity dataReplicaLocationEntity = mapper.map(dataReplicaLocationModel, DataReplicaLocationEntity.class);
        // Stamp the creation time only on first persistence; updates keep the
        // original creation time and refresh only the last-modified time.
        if (!isExists(replicaId)) {
            logger.debug("Data Replica Location does not exist yet; setting creation time");
            dataReplicaLocationEntity.setCreationTime(new Timestamp(System.currentTimeMillis()));
        }
        dataReplicaLocationEntity.setLastModifiedTime(new Timestamp(System.currentTimeMillis()));
        return execute(entityManager -> entityManager.merge(dataReplicaLocationEntity));
    }

    /** Registers a new replica location and returns its (possibly generated) replica ID. */
    @Override
    public String registerReplicaLocation(DataReplicaLocationModel dataReplicaLocationModel) throws ReplicaCatalogException {
        return saveDataReplicaLocationModelData(dataReplicaLocationModel);
    }

    /** Updates an existing replica location; returns true when the save produced an ID. */
    @Override
    public boolean updateReplicaLocation(DataReplicaLocationModel dataReplicaLocationModel) throws ReplicaCatalogException {
        return (saveDataReplicaLocationModelData(dataReplicaLocationModel) != null);
    }

    /** Fetches a single replica location by its replica ID. */
    @Override
    public DataReplicaLocationModel getReplicaLocation(String replicaId) throws ReplicaCatalogException {
        return get(replicaId);
    }

    /** Returns all replica locations attached to the data product with the given URI. */
    @Override
    public List<DataReplicaLocationModel> getAllReplicaLocations(String productUri) throws ReplicaCatalogException {
        DataProductRepository dataProductRepository = new DataProductRepository();
        DataProductModel dataProductModel = dataProductRepository.getDataProduct(productUri);
        return dataProductModel.getReplicaLocations();
    }

    /** Deletes the replica location with the given replica ID; returns true on success. */
    @Override
    public boolean removeReplicaLocation(String replicaId) throws ReplicaCatalogException {
        return delete(replicaId);
    }
}
| 876 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ParserRepository.java | package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.parser.Parser;
import org.apache.airavata.registry.core.entities.appcatalog.ParserEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Repository for data-parser definitions in the application catalog.
 * Persists parsers together with their input/output file descriptors and
 * supports listing all parsers registered for a gateway.
 */
public class ParserRepository extends AppCatAbstractRepository<Parser, ParserEntity, String> {
    private final static Logger logger = LoggerFactory.getLogger(ParserRepository.class);

    public ParserRepository() {
        super(Parser.class, ParserEntity.class);
    }

    /**
     * Maps the given parser to its entity, wires the bidirectional relation
     * (and foreign key) on each input/output file, and merges the result.
     *
     * @param parser parser model to persist
     * @return the saved parser mapped back to the Thrift model
     * @throws AppCatalogException when mapping or persistence fails
     */
    public Parser saveParser(Parser parser) throws AppCatalogException {
        try {
            Mapper mapper = ObjectMapperSingleton.getInstance();
            ParserEntity parserEntity = mapper.map(parser, ParserEntity.class);
            if (parser.getOutputFiles() != null) {
                parserEntity.getOutputFiles().forEach(outputFile -> {
                    outputFile.setParser(parserEntity);
                    outputFile.setParserId(parserEntity.getId());
                });
            }
            if (parser.getInputFiles() != null) {
                parserEntity.getInputFiles().forEach(inputFile -> {
                    inputFile.setParser(parserEntity);
                    inputFile.setParserId(parserEntity.getId());
                });
            }
            ParserEntity savedEntity = execute(entityManager -> entityManager.merge(parserEntity));
            return mapper.map(savedEntity, Parser.class);
        } catch (Exception e) {
            String message = "Failed to save parser with id " + parser.getId();
            logger.error(message, e);
            throw new AppCatalogException(message, e);
        }
    }

    /**
     * Lists every parser registered under the given gateway.
     *
     * @param gatewayId gateway to filter by
     * @return matching parsers (empty list when none)
     */
    public List<Parser> getAllParsers(String gatewayId) {
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.Parser.GATEWAY_ID, gatewayId);
        return select(QueryConstants.FIND_ALL_PARSERS_FOR_GATEWAY_ID, -1, 0, queryParameters);
    }
}
| 877 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/UserStoragePreferenceRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.userresourceprofile.UserStoragePreference;
import org.apache.airavata.registry.core.entities.appcatalog.UserStoragePreferenceEntity;
import org.apache.airavata.registry.core.entities.appcatalog.UserStoragePreferencePK;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Repository for per-user storage preferences in the application catalog.
 * Inherits all CRUD behavior from {@link AppCatAbstractRepository}; records
 * are keyed by the composite {@link UserStoragePreferencePK}
 * (user, gateway, storage resource).
 */
public class UserStoragePreferenceRepository extends AppCatAbstractRepository<UserStoragePreference, UserStoragePreferenceEntity, UserStoragePreferencePK> {

    public UserStoragePreferenceRepository() {
        super(UserStoragePreference.class, UserStoragePreferenceEntity.class);
    }
}
| 878 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInterfaceRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
import org.apache.airavata.model.appcatalog.appinterface.ApplicationInterfaceDescription;
import org.apache.airavata.model.appcatalog.appinterface.application_interface_modelConstants;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.model.application.io.OutputDataObjectType;
import org.apache.airavata.registry.core.entities.appcatalog.AppModuleMappingEntity;
import org.apache.airavata.registry.core.entities.appcatalog.ApplicationInterfaceEntity;
import org.apache.airavata.registry.core.entities.appcatalog.ApplicationModuleEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.apache.airavata.registry.cpi.ApplicationInterface;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.*;
/**
 * Repository for application interfaces and modules in the application
 * catalog. Handles ID generation, create/update timestamps, module-to-interface
 * mappings, and the various lookup queries used by the registry API.
 */
public class ApplicationInterfaceRepository extends AppCatAbstractRepository<ApplicationInterfaceDescription, ApplicationInterfaceEntity, String> implements ApplicationInterface {
    private final static Logger logger = LoggerFactory.getLogger(ApplicationInterfaceRepository.class);

    public ApplicationInterfaceRepository() {
        super(ApplicationInterfaceDescription.class, ApplicationInterfaceEntity.class);
    }

    /** Persists the interface description and returns its (possibly generated) ID. */
    protected String saveApplicationInterfaceDescriptorData(
            ApplicationInterfaceDescription applicationInterfaceDescription, String gatewayId) throws AppCatalogException {
        ApplicationInterfaceEntity applicationInterfaceEntity = saveApplicationInterface(applicationInterfaceDescription, gatewayId);
        return applicationInterfaceEntity.getApplicationInterfaceId();
    }

    /**
     * Maps the interface description to its entity, populates child keys and
     * timestamps, and merges it.
     *
     * @param applicationInterfaceDescription interface to persist; its ID is
     *        generated from the application name when null, blank, or the
     *        Thrift DEFAULT placeholder
     * @param gatewayId owning gateway, or null to leave the stored gateway unchanged
     */
    protected ApplicationInterfaceEntity saveApplicationInterface(
            ApplicationInterfaceDescription applicationInterfaceDescription, String gatewayId) throws AppCatalogException {
        String suppliedId = applicationInterfaceDescription.getApplicationInterfaceId();
        // Guard against null before trimming; previously a null ID caused an NPE.
        if (suppliedId == null || suppliedId.trim().isEmpty()
                || suppliedId.equals(application_interface_modelConstants.DEFAULT_ID)) {
            logger.debug("Application Interface ID is empty or DEFAULT; generating one from the Application Name");
            applicationInterfaceDescription.setApplicationInterfaceId(
                    AiravataUtils.getId(applicationInterfaceDescription.getApplicationName()));
        }
        String applicationInterfaceId = applicationInterfaceDescription.getApplicationInterfaceId();
        Mapper mapper = ObjectMapperSingleton.getInstance();
        ApplicationInterfaceEntity applicationInterfaceEntity = mapper.map(applicationInterfaceDescription, ApplicationInterfaceEntity.class);
        if (gatewayId != null) {
            logger.debug("Setting the gateway ID of the Application Interface");
            applicationInterfaceEntity.setGatewayId(gatewayId);
        }
        if (applicationInterfaceEntity.getApplicationInputs() != null) {
            logger.debug("Populating the Primary Key of ApplicationInputs objects for the Application Interface");
            applicationInterfaceEntity.getApplicationInputs().forEach(applicationInputEntity -> applicationInputEntity.setInterfaceId(applicationInterfaceId));
        }
        if (applicationInterfaceEntity.getApplicationOutputs() != null) {
            logger.debug("Populating the Primary Key of ApplicationOutputs objects for the Application Interface");
            applicationInterfaceEntity.getApplicationOutputs().forEach(applicationOutputEntity -> applicationOutputEntity.setInterfaceId(applicationInterfaceId));
        }
        // Stamp the creation time only on first persistence; updates refresh
        // only the update time.
        if (!isApplicationInterfaceExists(applicationInterfaceId)) {
            logger.debug("Application Interface does not exist yet; setting creation time");
            applicationInterfaceEntity.setCreationTime(new Timestamp(System.currentTimeMillis()));
        }
        applicationInterfaceEntity.setUpdateTime(new Timestamp(System.currentTimeMillis()));
        return execute(entityManager -> entityManager.merge(applicationInterfaceEntity));
    }

    /** Persists the application module and returns its (possibly generated) ID. */
    protected String saveApplicationModuleData(
            ApplicationModule applicationModule, String gatewayId) throws AppCatalogException {
        ApplicationModuleEntity applicationModuleEntity = saveApplicationModule(applicationModule, gatewayId);
        return applicationModuleEntity.getAppModuleId();
    }

    /**
     * Maps the module to its entity, stamps timestamps, and merges it.
     * The module ID is generated from the module name when null, blank, or
     * the Thrift DEFAULT placeholder.
     */
    protected ApplicationModuleEntity saveApplicationModule(
            ApplicationModule applicationModule, String gatewayId) throws AppCatalogException {
        String suppliedId = applicationModule.getAppModuleId();
        // Guard against null before trimming; previously a null ID caused an NPE.
        if (suppliedId == null || suppliedId.trim().isEmpty()
                || suppliedId.equals(application_interface_modelConstants.DEFAULT_ID)) {
            logger.debug("Application Module ID is empty or DEFAULT; generating one from the Application Module Name");
            applicationModule.setAppModuleId(AiravataUtils.getId(applicationModule.getAppModuleName()));
        }
        String applicationModuleId = applicationModule.getAppModuleId();
        Mapper mapper = ObjectMapperSingleton.getInstance();
        ApplicationModuleEntity applicationModuleEntity = mapper.map(applicationModule, ApplicationModuleEntity.class);
        if (gatewayId != null) {
            logger.debug("Setting the gateway ID of the Application Module");
            applicationModuleEntity.setGatewayId(gatewayId);
        }
        // Stamp the creation time only on first persistence.
        if (!isApplicationModuleExists(applicationModuleId)) {
            logger.debug("Application Module does not exist yet; setting creation time");
            applicationModuleEntity.setCreationTime(new Timestamp(System.currentTimeMillis()));
        }
        applicationModuleEntity.setUpdateTime(new Timestamp(System.currentTimeMillis()));
        return execute(entityManager -> entityManager.merge(applicationModuleEntity));
    }

    /** Registers a new application module under the gateway; returns its ID. */
    @Override
    public String addApplicationModule(ApplicationModule applicationModule, String gatewayId) throws AppCatalogException {
        return saveApplicationModuleData(applicationModule, gatewayId);
    }

    /** Registers a new application interface under the gateway; returns its ID. */
    @Override
    public String addApplicationInterface(ApplicationInterfaceDescription applicationInterfaceDescription, String gatewayId) throws AppCatalogException {
        return saveApplicationInterfaceDescriptorData(applicationInterfaceDescription, gatewayId);
    }

    /** Creates a mapping row linking an existing module to an existing interface. */
    @Override
    public void addApplicationModuleMapping(String moduleId, String interfaceId) throws AppCatalogException {
        Mapper mapper = ObjectMapperSingleton.getInstance();
        ApplicationModule applicationModule = getApplicationModule(moduleId);
        ApplicationInterfaceDescription applicationInterfaceDescription = getApplicationInterface(interfaceId);
        ApplicationModuleEntity applicationModuleEntity = mapper.map(applicationModule, ApplicationModuleEntity.class);
        ApplicationInterfaceEntity applicationInterfaceEntity = mapper.map(applicationInterfaceDescription, ApplicationInterfaceEntity.class);
        AppModuleMappingEntity appModuleMappingEntity = new AppModuleMappingEntity();
        appModuleMappingEntity.setModuleId(moduleId);
        appModuleMappingEntity.setInterfaceId(interfaceId);
        appModuleMappingEntity.setApplicationModule(applicationModuleEntity);
        appModuleMappingEntity.setApplicationInterface(applicationInterfaceEntity);
        execute(entityManager -> entityManager.merge(appModuleMappingEntity));
    }

    /** Updates an application module; the stored gateway ID is left unchanged. */
    @Override
    public void updateApplicationModule(String moduleId, ApplicationModule updatedApplicationModule) throws AppCatalogException {
        saveApplicationModuleData(updatedApplicationModule, null);
    }

    /** Updates an application interface; the stored gateway ID is left unchanged. */
    @Override
    public void updateApplicationInterface(String interfaceId, ApplicationInterfaceDescription updatedApplicationInterfaceDescription) throws AppCatalogException {
        saveApplicationInterfaceDescriptorData(updatedApplicationInterfaceDescription, null);
    }

    /** Fetches a single application module by ID. */
    @Override
    public ApplicationModule getApplicationModule(String moduleId) throws AppCatalogException {
        ApplicationModuleRepository applicationModuleRepository = new ApplicationModuleRepository();
        return applicationModuleRepository.get(moduleId);
    }

    /** Fetches a single application interface by ID. */
    @Override
    public ApplicationInterfaceDescription getApplicationInterface(String interfaceId) throws AppCatalogException {
        return get(interfaceId);
    }

    /**
     * Finds application modules by filter. Only filtering on the module name
     * is supported.
     *
     * @throws IllegalArgumentException for any unsupported filter field
     */
    @Override
    public List<ApplicationModule> getApplicationModules(Map<String, String> filters) throws AppCatalogException {
        ApplicationModuleRepository applicationModuleRepository = new ApplicationModuleRepository();
        if (filters.containsKey(DBConstants.ApplicationModule.APPLICATION_MODULE_NAME)) {
            logger.debug("Fetching Application Modules for given Application Module Name");
            Map<String, Object> queryParameters = new HashMap<>();
            queryParameters.put(DBConstants.ApplicationModule.APPLICATION_MODULE_NAME, filters.get(DBConstants.ApplicationModule.APPLICATION_MODULE_NAME));
            return applicationModuleRepository.select(QueryConstants.FIND_APPLICATION_MODULES_FOR_APPLICATION_MODULE_NAME, -1, 0, queryParameters);
        } else {
            logger.error("Unsupported field name for app module.");
            throw new IllegalArgumentException("Unsupported field name for app module.");
        }
    }

    /** Lists every application module registered under the gateway. */
    @Override
    public List<ApplicationModule> getAllApplicationModules(String gatewayId) throws AppCatalogException {
        ApplicationModuleRepository applicationModuleRepository = new ApplicationModuleRepository();
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.ApplicationModule.GATEWAY_ID, gatewayId);
        return applicationModuleRepository.select(QueryConstants.FIND_APPLICATION_MODULES_FOR_GATEWAY_ID, -1, 0, queryParameters);
    }

    /**
     * Finds application interfaces by filter. Only filtering on the
     * application name is supported.
     *
     * @throws IllegalArgumentException for any unsupported filter field
     */
    @Override
    public List<ApplicationInterfaceDescription> getApplicationInterfaces(Map<String, String> filters) throws AppCatalogException {
        if (filters.containsKey(DBConstants.ApplicationInterface.APPLICATION_NAME)) {
            logger.debug("Fetching Application Interfaces for given Application Name");
            Map<String, Object> queryParameters = new HashMap<>();
            queryParameters.put(DBConstants.ApplicationInterface.APPLICATION_NAME, filters.get(DBConstants.ApplicationInterface.APPLICATION_NAME));
            return select(QueryConstants.FIND_APPLICATION_INTERFACES_FOR_APPLICATION_NAME, -1, 0, queryParameters);
        } else {
            logger.error("Unsupported field name for app interface.");
            throw new IllegalArgumentException("Unsupported field name for app interface.");
        }
    }

    /** Lists every application interface registered under the gateway. */
    @Override
    public List<ApplicationInterfaceDescription> getAllApplicationInterfaces(String gatewayId) throws AppCatalogException {
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.ApplicationInterface.GATEWAY_ID, gatewayId);
        return select(QueryConstants.FIND_APPLICATION_INTERFACES_FOR_GATEWAY_ID, -1, 0, queryParameters);
    }

    /**
     * Lists modules in the gateway that are reachable through at least one
     * accessible deployment on an accessible compute host. Returns an empty
     * list immediately when either accessibility list is empty.
     */
    @Override
    public List<ApplicationModule> getAccessibleApplicationModules(String gatewayId, List<String> accessibleAppIds, List<String> accessibleCompHostIds) throws AppCatalogException {
        if (accessibleAppIds.isEmpty() || accessibleCompHostIds.isEmpty()) {
            return Collections.emptyList();
        }
        ApplicationModuleRepository applicationModuleRepository = new ApplicationModuleRepository();
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.ApplicationModule.GATEWAY_ID, gatewayId);
        queryParameters.put(DBConstants.ApplicationDeployment.ACCESSIBLE_APPLICATION_DEPLOYMENT_IDS, accessibleAppIds);
        queryParameters.put(DBConstants.ApplicationDeployment.ACCESSIBLE_COMPUTE_HOST_IDS, accessibleCompHostIds);
        return applicationModuleRepository.select(QueryConstants.FIND_ACCESSIBLE_APPLICATION_MODULES, -1, 0, queryParameters);
    }

    /** Returns the IDs of all application interfaces across all gateways. */
    @Override
    public List<String> getAllApplicationInterfaceIds() throws AppCatalogException {
        List<String> applicationInterfaceIds = new ArrayList<>();
        List<ApplicationInterfaceDescription> applicationInterfaceDescriptionList = select(QueryConstants.GET_ALL_APPLICATION_INTERFACES, 0);
        if (applicationInterfaceDescriptionList != null && !applicationInterfaceDescriptionList.isEmpty()) {
            logger.debug("The fetched list of Application Interfaces is not NULL or empty");
            for (ApplicationInterfaceDescription applicationInterfaceDescription : applicationInterfaceDescriptionList) {
                applicationInterfaceIds.add(applicationInterfaceDescription.getApplicationInterfaceId());
            }
        }
        return applicationInterfaceIds;
    }

    /** Returns the input descriptors declared by the given interface. */
    @Override
    public List<InputDataObjectType> getApplicationInputs(String interfaceId) throws AppCatalogException {
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.ApplicationInput.APPLICATION_INTERFACE_ID, interfaceId);
        ApplicationInputRepository applicationInputRepository = new ApplicationInputRepository();
        return applicationInputRepository.select(QueryConstants.FIND_APPLICATION_INPUTS, -1, 0, queryParameters);
    }

    /** Returns the output descriptors declared by the given interface. */
    @Override
    public List<OutputDataObjectType> getApplicationOutputs(String interfaceId) throws AppCatalogException {
        Map<String, Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.ApplicationOutput.APPLICATION_INTERFACE_ID, interfaceId);
        ApplicationOutputRepository applicationOutputRepository = new ApplicationOutputRepository();
        return applicationOutputRepository.select(QueryConstants.FIND_APPLICATION_OUTPUTS, -1, 0, queryParameters);
    }

    /** Deletes the application interface with the given ID; returns true on success. */
    @Override
    public boolean removeApplicationInterface(String interfaceId) throws AppCatalogException {
        return delete(interfaceId);
    }

    /** Deletes the application module with the given ID; returns true on success. */
    @Override
    public boolean removeApplicationModule(String moduleId) throws AppCatalogException {
        ApplicationModuleRepository applicationModuleRepository = new ApplicationModuleRepository();
        return applicationModuleRepository.delete(moduleId);
    }

    /** Checks whether an application interface with the given ID exists. */
    @Override
    public boolean isApplicationInterfaceExists(String interfaceId) throws AppCatalogException {
        return isExists(interfaceId);
    }

    /** Checks whether an application module with the given ID exists. */
    @Override
    public boolean isApplicationModuleExists(String moduleId) throws AppCatalogException {
        ApplicationModuleRepository applicationModuleRepository = new ApplicationModuleRepository();
        return applicationModuleRepository.isExists(moduleId);
    }
}
| 879 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/DataMovementRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.data.movement.DataMovementInterface;
import org.apache.airavata.registry.core.entities.appcatalog.DataMovementInterfaceEntity;
import org.apache.airavata.registry.core.entities.appcatalog.DataMovementInterfacePK;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.dozer.Mapper;
/**
 * Repository for data-movement interfaces attached to compute resources.
 * Records are keyed by the composite {@link DataMovementInterfacePK}.
 */
public class DataMovementRepository extends AppCatAbstractRepository<DataMovementInterface, DataMovementInterfaceEntity, DataMovementInterfacePK> {

    public DataMovementRepository() {
        super(DataMovementInterface.class, DataMovementInterfaceEntity.class);
    }

    /**
     * Attaches the given data-movement interface to a compute resource and
     * persists it.
     *
     * @param resourceId            compute resource the interface belongs to
     * @param dataMovementInterface interface model to persist
     * @return the data-movement interface ID of the persisted record
     */
    public String addDataMovementProtocol(String resourceId, DataMovementInterface dataMovementInterface) {
        Mapper modelMapper = ObjectMapperSingleton.getInstance();
        DataMovementInterfaceEntity movementEntity = modelMapper.map(dataMovementInterface, DataMovementInterfaceEntity.class);
        movementEntity.setComputeResourceId(resourceId);
        execute(entityManager -> entityManager.merge(movementEntity));
        return movementEntity.getDataMovementInterfaceId();
    }
}
| 880 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationInputRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.application.io.InputDataObjectType;
import org.apache.airavata.registry.core.entities.appcatalog.ApplicationInputEntity;
import org.apache.airavata.registry.core.entities.appcatalog.ApplicationInputPK;
/**
 * Repository for application interface input descriptors. Inherits all CRUD
 * behavior from {@link AppCatAbstractRepository}; records are keyed by the
 * composite {@link ApplicationInputPK} (interface ID, input name).
 */
public class ApplicationInputRepository extends AppCatAbstractRepository<InputDataObjectType, ApplicationInputEntity, ApplicationInputPK> {

    public ApplicationInputRepository() {
        super(InputDataObjectType.class, ApplicationInputEntity.class);
    }
}
| 881 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/UserResourceProfileRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
import org.apache.airavata.model.appcatalog.userresourceprofile.UserResourceProfile;
import org.apache.airavata.model.appcatalog.userresourceprofile.UserStoragePreference;
import org.apache.airavata.registry.core.entities.appcatalog.UserComputeResourcePreferencePK;
import org.apache.airavata.registry.core.entities.appcatalog.UserResourceProfileEntity;
import org.apache.airavata.registry.core.entities.appcatalog.UserResourceProfilePK;
import org.apache.airavata.registry.core.entities.appcatalog.UserStoragePreferencePK;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.apache.airavata.registry.cpi.CompositeIdentifier;
import org.apache.airavata.registry.cpi.UsrResourceProfile;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.*;
public class UserResourceProfileRepository extends AppCatAbstractRepository<UserResourceProfile, UserResourceProfileEntity, UserResourceProfilePK> implements UsrResourceProfile {
private final static Logger logger = LoggerFactory.getLogger(UserResourceProfileRepository.class);
public UserResourceProfileRepository() {
super(UserResourceProfile.class, UserResourceProfileEntity.class);
}
protected String saveUserResourceProfileData(UserResourceProfile userResourceProfile) throws AppCatalogException {
UserResourceProfileEntity userResourceProfileEntity = saveUserResourceProfile(userResourceProfile);
return userResourceProfileEntity.getUserId();
}
protected UserResourceProfileEntity saveUserResourceProfile(UserResourceProfile userResourceProfile) throws AppCatalogException {
String userId = userResourceProfile.getUserId();
String gatewayId = userResourceProfile.getGatewayID();
Mapper mapper = ObjectMapperSingleton.getInstance();
UserResourceProfileEntity userResourceProfileEntity = mapper.map(userResourceProfile, UserResourceProfileEntity.class);
if (userResourceProfileEntity.getUserComputeResourcePreferences() != null) {
logger.debug("Populating the Primary Key UserComputeResourcePreferences objects for the User Resource Profile");
userResourceProfileEntity.getUserComputeResourcePreferences().forEach(userComputeResourcePreferenceEntity -> { userComputeResourcePreferenceEntity.setUserId(userId);
userComputeResourcePreferenceEntity.setGatewayId(gatewayId); });
}
if (userResourceProfileEntity.getUserStoragePreferences() != null) {
logger.debug("Populating the Primary Key UserStoragePreferences objects for the User Resource Profile");
userResourceProfileEntity.getUserStoragePreferences().forEach(userStoragePreferenceEntity -> { userStoragePreferenceEntity.setUserId(userId);
userStoragePreferenceEntity.setGatewayId(gatewayId); });
}
if (!isUserResourceProfileExists(userId, gatewayId)) {
logger.debug("Checking if the User Resource Profile already exists");
userResourceProfileEntity.setCreationTime(new Timestamp(System.currentTimeMillis()));
}
userResourceProfileEntity.setUpdateTime(new Timestamp(System.currentTimeMillis()));
return execute(entityManager -> entityManager.merge(userResourceProfileEntity));
}
@Override
public String addUserResourceProfile(UserResourceProfile userResourceProfile) throws AppCatalogException {
return saveUserResourceProfileData(userResourceProfile);
}
@Override
public void updateUserResourceProfile(String userId, String gatewayId, UserResourceProfile updatedProfile) throws AppCatalogException {
saveUserResourceProfileData(updatedProfile);
}
@Override
public UserResourceProfile getUserResourceProfile(String userId, String gatewayId) throws AppCatalogException {
UserResourceProfilePK userResourceProfilePK = new UserResourceProfilePK();
userResourceProfilePK.setUserId(userId);
userResourceProfilePK.setGatewayId(gatewayId);
UserResourceProfile userResourceProfile = get(userResourceProfilePK);
return userResourceProfile;
}
/**
 * Fetches the compute-resource preference a user has configured for a
 * specific host within a gateway.
 *
 * @param userId    owner of the preference
 * @param gatewayId gateway scope
 * @param hostId    compute resource the preference applies to
 * @return the matching preference, as returned by the repository lookup
 * @throws AppCatalogException on lookup failure
 */
@Override
public UserComputeResourcePreference getUserComputeResourcePreference(String userId, String gatewayId, String hostId) throws AppCatalogException {
    // The preference table is keyed by (userId, gatewayId, computeResourceId).
    UserComputeResourcePreferencePK key = new UserComputeResourcePreferencePK();
    key.setUserId(userId);
    key.setGatewayId(gatewayId);
    key.setComputeResourceId(hostId);
    return new UserComputeResourcePreferenceRepository().get(key);
}
/**
 * Fetches the storage preference a user has configured for a specific
 * storage resource within a gateway.
 *
 * @param userId    owner of the preference
 * @param gatewayId gateway scope
 * @param storageId storage resource the preference applies to
 * @return the matching preference, as returned by the repository lookup
 * @throws AppCatalogException on lookup failure
 */
@Override
public UserStoragePreference getUserStoragePreference(String userId, String gatewayId, String storageId) throws AppCatalogException {
    // The preference table is keyed by (userId, gatewayId, storageResourceId).
    UserStoragePreferencePK key = new UserStoragePreferencePK();
    key.setUserId(userId);
    key.setGatewayId(gatewayId);
    key.setStorageResourceId(storageId);
    return new UserStoragePreferenceRepository().get(key);
}
/**
 * Returns every user resource profile in the catalog.
 *
 * @return all stored profiles (offset 0, no filtering)
 * @throws AppCatalogException on query failure
 */
@Override
public List<UserResourceProfile> getAllUserResourceProfiles() throws AppCatalogException {
    return select(QueryConstants.GET_ALL_USER_RESOURCE_PROFILE, 0);
}
/**
 * Lists every compute-resource preference the given user has within the
 * given gateway.
 *
 * @param userId    owner of the preferences
 * @param gatewayId gateway scope
 * @return all matching preferences (no result limit)
 * @throws AppCatalogException on query failure
 */
@Override
public List<UserComputeResourcePreference> getAllUserComputeResourcePreferences(String userId, String gatewayId) throws AppCatalogException {
    Map<String, Object> params = new HashMap<>();
    params.put(DBConstants.UserComputeResourcePreference.USER_ID, userId);
    params.put(DBConstants.UserComputeResourcePreference.GATEWAY_ID, gatewayId);
    // -1 limit means "no limit"; offset 0 starts from the first row.
    return new UserComputeResourcePreferenceRepository()
            .select(QueryConstants.GET_ALL_USER_COMPUTE_RESOURCE_PREFERENCE, -1, 0, params);
}
/**
 * Lists every storage preference the given user has within the given gateway.
 *
 * @param userId    owner of the preferences
 * @param gatewayId gateway scope
 * @return all matching preferences (no result limit)
 * @throws AppCatalogException on query failure
 */
@Override
public List<UserStoragePreference> getAllUserStoragePreferences(String userId, String gatewayId) throws AppCatalogException {
    Map<String, Object> params = new HashMap<>();
    params.put(DBConstants.UserStoragePreference.USER_ID, userId);
    params.put(DBConstants.UserStoragePreference.GATEWAY_ID, gatewayId);
    // -1 limit means "no limit"; offset 0 starts from the first row.
    return new UserStoragePreferenceRepository()
            .select(QueryConstants.GET_ALL_USER_STORAGE_PREFERENCE, -1, 0, params);
}
/**
 * Returns the gateway id of every user resource profile registered under
 * the given gateway name.
 *
 * @param gatewayName gateway id to filter profiles by
 * @return one gateway id per matching profile (may contain duplicates)
 * @throws AppCatalogException on query failure
 */
@Override
public List<String> getGatewayProfileIds(String gatewayName) throws AppCatalogException {
    Map<String, Object> params = new HashMap<>();
    params.put(DBConstants.UserResourceProfile.GATEWAY_ID, gatewayName);
    List<UserResourceProfile> profiles = select(QueryConstants.GET_ALL_GATEWAY_ID, -1, 0, params);
    // Project each profile down to just its gateway id.
    List<String> ids = new ArrayList<>(profiles.size());
    for (UserResourceProfile profile : profiles) {
        ids.add(profile.getGatewayID());
    }
    return ids;
}
/**
 * Resolves a user name from a user id.
 *
 * NOTE(review): as implemented this simply echoes the given userId back and
 * ignores gatewayID — presumably user ids double as user names in this
 * registry; confirm with the interface contract before relying on it.
 *
 * @param userId    the user id (returned unchanged)
 * @param gatewayID gateway scope (unused)
 * @return the userId argument
 * @throws AppCatalogException declared by the interface; never thrown here
 */
@Override
public String getUserNamefromID(String userId, String gatewayID) throws AppCatalogException {
return userId;
}
/**
 * Deletes the user resource profile identified by (userId, gatewayId).
 *
 * @return the result of the underlying delete() call
 * @throws AppCatalogException on persistence failure
 */
@Override
public boolean removeUserResourceProfile(String userId, String gatewayId) throws AppCatalogException {
    UserResourceProfilePK key = new UserResourceProfilePK();
    key.setUserId(userId);
    key.setGatewayId(gatewayId);
    return delete(key);
}
/**
 * Deletes a user's compute-resource preference for the given compute
 * resource within a gateway.
 *
 * @param preferenceId compute resource id the preference applies to
 * @return the result of the underlying delete() call
 * @throws AppCatalogException on persistence failure
 */
@Override
public boolean removeUserComputeResourcePreferenceFromGateway(String userId, String gatewayId, String preferenceId) throws AppCatalogException {
    // Composite key: (userId, gatewayId, computeResourceId).
    UserComputeResourcePreferencePK key = new UserComputeResourcePreferencePK();
    key.setUserId(userId);
    key.setGatewayId(gatewayId);
    key.setComputeResourceId(preferenceId);
    return new UserComputeResourcePreferenceRepository().delete(key);
}
/**
 * Deletes a user's storage preference for the given storage resource within
 * a gateway.
 *
 * @param preferenceId storage resource id the preference applies to
 * @return the result of the underlying delete() call
 * @throws AppCatalogException on persistence failure
 */
@Override
public boolean removeUserDataStoragePreferenceFromGateway(String userId, String gatewayId, String preferenceId) throws AppCatalogException {
    // Composite key: (userId, gatewayId, storageResourceId).
    UserStoragePreferencePK key = new UserStoragePreferencePK();
    key.setUserId(userId);
    key.setGatewayId(gatewayId);
    key.setStorageResourceId(preferenceId);
    return new UserStoragePreferenceRepository().delete(key);
}
/**
 * Checks whether a user resource profile exists for (userId, gatewayId).
 *
 * @return true if a matching profile row exists
 * @throws AppCatalogException on lookup failure
 */
@Override
public boolean isUserResourceProfileExists(String userId, String gatewayId) throws AppCatalogException {
    UserResourceProfilePK key = new UserResourceProfilePK();
    key.setUserId(userId);
    key.setGatewayId(gatewayId);
    return isExists(key);
}
/**
 * Checks whether the user has a compute-resource preference for the given
 * compute resource within the gateway.
 *
 * @param preferenceId compute resource id the preference applies to
 * @return true if a matching preference row exists
 * @throws AppCatalogException on lookup failure
 */
@Override
public boolean isUserComputeResourcePreferenceExists(String userId, String gatewayId, String preferenceId) throws AppCatalogException {
    // Composite key: (userId, gatewayId, computeResourceId).
    UserComputeResourcePreferencePK key = new UserComputeResourcePreferencePK();
    key.setUserId(userId);
    key.setGatewayId(gatewayId);
    key.setComputeResourceId(preferenceId);
    return new UserComputeResourcePreferenceRepository().isExists(key);
}
/** Exposes this repository's SLF4J logger (e.g. for tests or diagnostics). */
public static Logger getLogger() {
return logger;
}
}
| 882 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/GatewayGroupsRepository.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.gatewaygroups.GatewayGroups;
import org.apache.airavata.registry.core.entities.appcatalog.GatewayGroupsEntity;
/**
 * Repository mapping the {@link GatewayGroups} thrift model to its JPA
 * entity ({@link GatewayGroupsEntity}), keyed by a String id. All CRUD
 * behavior is inherited from {@link AppCatAbstractRepository}.
 */
public class GatewayGroupsRepository extends AppCatAbstractRepository<GatewayGroups, GatewayGroupsEntity, String> {
public GatewayGroupsRepository() {
super(GatewayGroups.class, GatewayGroupsEntity.class);
}
}
| 883 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ParsingTemplateRepository.java | package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.parser.ParsingTemplate;
import org.apache.airavata.registry.core.entities.appcatalog.ParsingTemplateEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.QueryConstants;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Repository for {@link ParsingTemplate} catalog entries, keyed by a String
 * id. Adds finders by application interface and by gateway on top of the
 * inherited CRUD operations.
 */
public class ParsingTemplateRepository extends AppCatAbstractRepository<ParsingTemplate, ParsingTemplateEntity, String> {

    public ParsingTemplateRepository() {
        super(ParsingTemplate.class, ParsingTemplateEntity.class);
    }

    /** Returns the templates bound to the given application interface. */
    public List<ParsingTemplate> getParsingTemplatesForApplication(String applicationInterfaceId) {
        Map<String, Object> params = new HashMap<>();
        params.put(DBConstants.ParsingTemplate.APPLICATION_INTERFACE_ID, applicationInterfaceId);
        // -1 limit means "no limit"; offset 0 starts from the first row.
        return select(QueryConstants.FIND_PARSING_TEMPLATES_FOR_APPLICATION_INTERFACE_ID, -1, 0, params);
    }

    /** Returns every template registered under the given gateway. */
    public List<ParsingTemplate> getAllParsingTemplates(String gatewayId) {
        Map<String, Object> params = new HashMap<>();
        params.put(DBConstants.ParsingTemplate.GATEWAY_ID, gatewayId);
        return select(QueryConstants.FIND_ALL_PARSING_TEMPLATES_FOR_GATEWAY_ID, -1, 0, params);
    }
}
| 884 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourcePrefRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
import org.apache.airavata.registry.core.entities.appcatalog.ComputeResourcePreferenceEntity;
import org.apache.airavata.registry.core.entities.appcatalog.ComputeResourcePreferencePK;
import org.apache.airavata.registry.core.entities.appcatalog.SSHAccountProvisionerConfiguration;
import java.util.HashMap;
import java.util.Map;
/**
 * Repository for gateway-level {@link ComputeResourcePreference} entries,
 * keyed by the composite {@link ComputeResourcePreferencePK}.
 */
public class ComputeResourcePrefRepository extends AppCatAbstractRepository<ComputeResourcePreference, ComputeResourcePreferenceEntity, ComputeResourcePreferencePK> {

    public ComputeResourcePrefRepository() {
        super(ComputeResourcePreference.class, ComputeResourcePreferenceEntity.class);
    }

    /**
     * Returns the SSH account provisioner configuration (name -> value) stored
     * on the compute resource preference for (gatewayId, hostId), or null when
     * no preference exists or it carries no configurations.
     *
     * @param gatewayId gateway the preference belongs to
     * @param hostId    compute resource the preference applies to
     * @return config name/value map, or null when absent
     */
    public Map<String, String> getsshAccountProvisionerConfig(String gatewayId, String hostId) {
        ComputeResourcePreferencePK pk = new ComputeResourcePreferencePK();
        pk.setGatewayId(gatewayId);
        pk.setComputeResourceId(hostId);
        ComputeResourcePreferenceEntity entity = execute(entityManager -> entityManager
                .find(ComputeResourcePreferenceEntity.class, pk));
        // Fix: EntityManager.find returns null when no such preference row
        // exists; the previous code dereferenced it unconditionally (NPE).
        if (entity == null) {
            return null;
        }
        if (entity.getSshAccountProvisionerConfigurations() != null && !entity.getSshAccountProvisionerConfigurations().isEmpty()) {
            Map<String, String> configs = new HashMap<>();
            for (SSHAccountProvisionerConfiguration config : entity.getSshAccountProvisionerConfigurations()) {
                configs.put(config.getConfigName(), config.getConfigValue());
            }
            return configs;
        }
        return null;
    }
}
| 885 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/StoragePrefRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.gatewayprofile.StoragePreference;
import org.apache.airavata.registry.core.entities.appcatalog.StoragePreferenceEntity;
import org.apache.airavata.registry.core.entities.appcatalog.StoragePreferencePK;
/**
 * Repository mapping the gateway-level {@link StoragePreference} thrift model
 * to its JPA entity, keyed by the composite {@link StoragePreferencePK}.
 * All CRUD behavior is inherited from {@link AppCatAbstractRepository}.
 */
public class StoragePrefRepository extends AppCatAbstractRepository<StoragePreference, StoragePreferenceEntity, StoragePreferencePK> {
public StoragePrefRepository() {
super(StoragePreference.class, StoragePreferenceEntity.class);
}
}
| 886 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationOutputRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.application.io.OutputDataObjectType;
import org.apache.airavata.registry.core.entities.appcatalog.ApplicationOutputEntity;
import org.apache.airavata.registry.core.entities.appcatalog.ApplicationOutputPK;
/**
 * Repository mapping application output records ({@link OutputDataObjectType})
 * to {@link ApplicationOutputEntity}, keyed by the composite
 * {@link ApplicationOutputPK}. All CRUD behavior is inherited.
 */
public class ApplicationOutputRepository extends AppCatAbstractRepository<OutputDataObjectType, ApplicationOutputEntity, ApplicationOutputPK> {
public ApplicationOutputRepository () {
super(OutputDataObjectType.class, ApplicationOutputEntity.class);
}
}
| 887 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/AppCatAbstractRepository.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.registry.core.repositories.AbstractRepository;
import org.apache.airavata.registry.core.utils.JPAUtil.AppCatalogJPAUtils;
import javax.persistence.EntityManager;
/**
 * Base repository for all app-catalog repositories. Binds the generic
 * {@link AbstractRepository} machinery to the app-catalog persistence unit
 * by supplying its EntityManager.
 *
 * @param <T>  thrift model type exposed to callers
 * @param <E>  JPA entity type stored in the database
 * @param <Id> primary-key type
 */
public class AppCatAbstractRepository<T, E, Id> extends AbstractRepository<T, E, Id> {
public AppCatAbstractRepository(Class<T> thriftGenericClass, Class<E> dbEntityGenericClass) {
super(thriftGenericClass, dbEntityGenericClass);
}
// Every operation in the base class goes through this app-catalog-specific
// EntityManager rather than a shared/default one.
@Override
protected EntityManager getEntityManager() {
return AppCatalogJPAUtils.getEntityManager();
}
}
| 888 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ComputeResourcePolicyRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.groupresourceprofile.ComputeResourcePolicy;
import org.apache.airavata.registry.core.entities.appcatalog.ComputeResourcePolicyEntity;
/**
* Created by skariyat on 2/10/18.
*/
/**
 * Repository mapping {@link ComputeResourcePolicy} (group resource profile
 * policy) to its JPA entity, keyed by a String id. All CRUD behavior is
 * inherited from {@link AppCatAbstractRepository}.
 */
public class ComputeResourcePolicyRepository extends AppCatAbstractRepository<ComputeResourcePolicy, ComputeResourcePolicyEntity, String>{
public ComputeResourcePolicyRepository() {
super(ComputeResourcePolicy.class, ComputeResourcePolicyEntity.class);
}
}
| 889 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ParserOutputRepository.java | package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.parser.ParserOutput;
import org.apache.airavata.registry.core.entities.appcatalog.ParserOutputEntity;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ParserOutputRepository extends AppCatAbstractRepository<ParserOutput, ParserOutputEntity, String> {
private final static Logger logger = LoggerFactory.getLogger(ParserInputRepository.class);
public ParserOutputRepository() {
super(ParserOutput.class, ParserOutputEntity.class);
}
public ParserOutput getParserOutput(String outputId) throws AppCatalogException {
try {
return super.get(outputId);
} catch (Exception e) {
logger.error("Failed to fetch parser output with id " + outputId, e);
throw new AppCatalogException("Failed to fetch parser output with id " + outputId, e);
}
}
} | 890 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/StorageResourceRepository.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.storageresource.StorageResourceDescription;
import org.apache.airavata.model.commons.airavata_commonsConstants;
import org.apache.airavata.model.data.movement.DataMovementInterface;
import org.apache.airavata.registry.core.entities.appcatalog.DataMovementInterfaceEntity;
import org.apache.airavata.registry.core.entities.appcatalog.StorageInterfaceEntity;
import org.apache.airavata.registry.core.entities.appcatalog.StorageInterfacePK;
import org.apache.airavata.registry.core.entities.appcatalog.StorageResourceEntity;
import org.apache.airavata.registry.core.utils.AppCatalogUtils;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.apache.airavata.registry.cpi.StorageResource;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by skariyat on 3/12/18.
*/
/**
 * Repository for {@link StorageResourceDescription} catalog entries, keyed by
 * a String resource id. Implements the {@link StorageResource} CPI, wrapping
 * persistence failures in {@link AppCatalogException}.
 */
public class StorageResourceRepository extends AppCatAbstractRepository<StorageResourceDescription, StorageResourceEntity, String> implements StorageResource {

    private final static Logger logger = LoggerFactory.getLogger(StorageResourceRepository.class);

    public StorageResourceRepository() {
        super(StorageResourceDescription.class, StorageResourceEntity.class);
    }

    /**
     * Persists a new storage resource. Generates a resource id from the host
     * name when the description has none (empty or the thrift DEFAULT_ID),
     * stamps the creation time, and propagates the resource id onto any data
     * movement interfaces so their FK columns are populated.
     *
     * @return the (possibly generated) storage resource id
     * @throws AppCatalogException wrapping any persistence failure
     */
    @Override
    public String addStorageResource(StorageResourceDescription description) throws AppCatalogException {
        try {
            final String storageResourceId = AppCatalogUtils.getID(description.getHostName());
            if ("".equals(description.getStorageResourceId()) || airavata_commonsConstants.DEFAULT_ID.equals(description.getStorageResourceId())) {
                description.setStorageResourceId(storageResourceId);
            }
            description.setCreationTime(System.currentTimeMillis());
            if (description.getDataMovementInterfaces() != null) {
                description.getDataMovementInterfaces().stream().forEach(dm -> dm.setStorageResourceId(description.getStorageResourceId()));
            }
            StorageResourceDescription saved = create(description);
            return saved.getStorageResourceId();
        } catch (Exception e) {
            logger.error("Error while saving storage resource. StorageResourceId : " + description.getStorageResourceId() + ""
                    + " HostName : " + description.getHostName(), e);
            throw new AppCatalogException("Error while saving storage resource. StorageResourceId : " + description.getStorageResourceId() + ""
                    + " HostName : " + description.getHostName(), e);
        }
    }

    /**
     * Updates an existing storage resource, stamping the update time and
     * propagating the resource id onto its data movement interfaces.
     *
     * NOTE(review): the storageResourceId parameter is not used — the id on
     * updatedStorageResource is what the update keys on; confirm callers pass
     * a description whose id matches.
     */
    @Override
    public void updateStorageResource(String storageResourceId, StorageResourceDescription updatedStorageResource) throws AppCatalogException {
        try {
            updatedStorageResource.setUpdateTime(System.currentTimeMillis());
            if (updatedStorageResource.getDataMovementInterfaces() != null) {
                updatedStorageResource.getDataMovementInterfaces().stream().forEach(dm -> dm.setStorageResourceId(updatedStorageResource.getStorageResourceId()));
            }
            update(updatedStorageResource);
        } catch (Exception e) {
            logger.error("Error while updating storage resource. StorageResourceId : " + updatedStorageResource.getStorageResourceId() + ""
                    + " HostName : " + updatedStorageResource.getHostName(), e);
            throw new AppCatalogException("Error while updating storage resource. StorageResourceId : " + updatedStorageResource.getStorageResourceId() + ""
                    + " HostName : " + updatedStorageResource.getHostName(), e);
        }
    }

    /** Fetches a storage resource by id, wrapping failures. */
    @Override
    public StorageResourceDescription getStorageResource(String resourceId) throws AppCatalogException {
        try {
            return get(resourceId);
        } catch (Exception e) {
            logger.error("Error while retrieving storage resource. Resource Id: " + resourceId, e);
            throw new AppCatalogException("Error while retrieving storage resource. Resource Id: " + resourceId, e);
        }
    }

    /**
     * Returns storage resources matching the supplied filters. Only filtering
     * by host name is supported; any other filter map is rejected.
     *
     * @throws AppCatalogException on query failure or unsupported filter
     */
    @Override
    public List<StorageResourceDescription> getStorageResourceList(Map<String, String> filters) throws AppCatalogException {
        try {
            if (filters.containsKey(DBConstants.StorageResource.HOST_NAME)) {
                Map<String, Object> queryParameters = new HashMap<>();
                // NOTE(review): the query parameter key reuses
                // DBConstants.ComputeResource.HOST_NAME; presumably its value is
                // identical to StorageResource.HOST_NAME — confirm before changing.
                queryParameters.put(DBConstants.ComputeResource.HOST_NAME, filters.get(DBConstants.StorageResource.HOST_NAME));
                return select(QueryConstants.FIND_STORAGE_RESOURCE, -1, 0, queryParameters);
            } else {
                // Fixed message: this method deals with storage resources, not compute resources.
                logger.error("Unsupported field name for storage resource. " + filters.get(DBConstants.StorageResource.HOST_NAME));
                throw new IllegalArgumentException("Unsupported field name for storage resource. " + filters.get(DBConstants.StorageResource.HOST_NAME));
            }
        } catch (Exception e) {
            logger.error("Error while retrieving storage resource list", e);
            throw new AppCatalogException("Error while retrieving storage resource list", e);
        }
    }

    /** Returns every storage resource in the catalog. */
    @Override
    public List<StorageResourceDescription> getAllStorageResourceList() throws AppCatalogException {
        try {
            return select(QueryConstants.FIND_ALL_STORAGE_RESOURCES, 0);
        } catch (Exception e) {
            logger.error("Error while retrieving storage resource list", e);
            throw new AppCatalogException("Error while retrieving storage resource list", e);
        }
    }

    /** Returns a map of storage resource id -> host name for all resources. */
    @Override
    public Map<String, String> getAllStorageResourceIdList() throws AppCatalogException {
        try {
            // Removed an unused local map; the shared helper builds the result.
            List<StorageResourceDescription> descriptions = select(QueryConstants.FIND_ALL_STORAGE_RESOURCES, 0);
            return getStorageResourceMap(descriptions);
        } catch (Exception e) {
            logger.error("Error while retrieving storage resource ID map", e);
            throw new AppCatalogException("Error while retrieving storage resource ID map", e);
        }
    }

    /** Returns a map of storage resource id -> host name for available (enabled) resources. */
    @Override
    public Map<String, String> getAvailableStorageResourceIdList() throws AppCatalogException {
        try {
            // Removed an unused local map; the shared helper builds the result.
            List<StorageResourceDescription> descriptions = select(QueryConstants.FIND_ALL_AVAILABLE_STORAGE_RESOURCES, 0);
            return getStorageResourceMap(descriptions);
        } catch (Exception e) {
            logger.error("Error while retrieving available storage resource ID map", e);
            throw new AppCatalogException("Error while retrieving available storage resource ID map", e);
        }
    }

    /** Checks whether a storage resource with the given id exists. */
    @Override
    public boolean isStorageResourceExists(String resourceId) throws AppCatalogException {
        try {
            return isExists(resourceId);
        } catch (Exception e) {
            logger.error("Error while retrieving storage resource. Resource ID: " + resourceId, e);
            throw new AppCatalogException("Error while retrieving storage resource. Resource ID: " + resourceId, e);
        }
    }

    /** Deletes the storage resource with the given id. */
    @Override
    public void removeStorageResource(String resourceId) throws AppCatalogException {
        try {
            delete(resourceId);
        } catch (Exception e) {
            logger.error("Error while removing storage resource Resource ID: " + resourceId, e);
            throw new AppCatalogException("Error while removing storage resource Resource ID: " + resourceId, e);
        }
    }

    /**
     * Persists a data movement interface (mapped onto a storage interface
     * entity) and returns its id.
     */
    public String addDataMovementInterface(DataMovementInterface dataMovementInterface) {
        Mapper mapper = ObjectMapperSingleton.getInstance();
        StorageInterfaceEntity storageInterfaceEntity = mapper.map(dataMovementInterface, StorageInterfaceEntity.class);
        execute(entityManager -> entityManager.merge(storageInterfaceEntity));
        return dataMovementInterface.getDataMovementInterfaceId();
    }

    /** Removes the data movement interface identified by (storageResourceId, dataMovementInterfaceId). */
    @Override
    public void removeDataMovementInterface(String storageResourceId, String dataMovementInterfaceId) throws AppCatalogException {
        try {
            StorageInterfacePK storageInterfacePK = new StorageInterfacePK();
            storageInterfacePK.setDataMovementInterfaceId(dataMovementInterfaceId);
            storageInterfacePK.setStorageResourceId(storageResourceId);
            execute(entityManager -> {
                StorageInterfaceEntity entity = entityManager.find(StorageInterfaceEntity.class, storageInterfacePK);
                entityManager.remove(entity);
                return entity;
            });
        } catch (Exception e) {
            logger.error("Error removing storage data movement interface. StorageResourceId: " + storageResourceId + ""
                    + " DataMovementInterfaceId: " + dataMovementInterfaceId, e);
            throw new AppCatalogException("Error removing storage data movement interface. StorageResourceId: " + storageResourceId + ""
                    + " DataMovementInterfaceId: " + dataMovementInterfaceId, e);
        }
    }

    /** Builds an id -> host name map from the given descriptions (empty map for null input). */
    private Map<String, String> getStorageResourceMap(List<StorageResourceDescription> storageResourceDescriptionList) {
        Map<String, String> storageResourceMap = new HashMap<String, String>();
        if (storageResourceDescriptionList != null) {
            for (StorageResourceDescription storageResourceDescription : storageResourceDescriptionList) {
                storageResourceMap.put(storageResourceDescription.getStorageResourceId(), storageResourceDescription.getHostName());
            }
        }
        return storageResourceMap;
    }
}
| 891 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/UserComputeResourcePreferenceRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.userresourceprofile.UserComputeResourcePreference;
import org.apache.airavata.registry.core.entities.appcatalog.UserComputeResourcePreferenceEntity;
import org.apache.airavata.registry.core.entities.appcatalog.UserComputeResourcePreferencePK;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Repository mapping the per-user {@link UserComputeResourcePreference}
 * thrift model to its JPA entity, keyed by the composite
 * {@link UserComputeResourcePreferencePK}. All CRUD behavior is inherited.
 */
public class UserComputeResourcePreferenceRepository extends AppCatAbstractRepository<UserComputeResourcePreference, UserComputeResourcePreferenceEntity, UserComputeResourcePreferencePK> {
public UserComputeResourcePreferenceRepository() {
super(UserComputeResourcePreference.class, UserComputeResourcePreferenceEntity.class);
}
}
| 892 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/JobSubmissionInterfaceRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface;
import org.apache.airavata.registry.core.entities.appcatalog.JobSubmissionInterfaceEntity;
import org.apache.airavata.registry.core.entities.appcatalog.JobSubmissionInterfacePK;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.dozer.Mapper;
/**
 * App-catalog repository for {@link JobSubmissionInterface} records, keyed by the
 * composite {@link JobSubmissionInterfacePK}. Inherits generic CRUD behavior from
 * {@link AppCatAbstractRepository} and adds a helper for attaching a job submission
 * interface to a compute resource.
 */
public class JobSubmissionInterfaceRepository extends AppCatAbstractRepository<JobSubmissionInterface, JobSubmissionInterfaceEntity, JobSubmissionInterfacePK> {

    public JobSubmissionInterfaceRepository() {
        super(JobSubmissionInterface.class, JobSubmissionInterfaceEntity.class);
    }

    /**
     * Persists the given job submission interface and associates it with the
     * given compute resource.
     *
     * @param computeResourceId      id of the compute resource the interface belongs to
     * @param jobSubmissionInterface Thrift model to persist (mapped to its JPA entity via Dozer)
     * @return the job submission interface id of the persisted record
     */
    public String addJobSubmission(String computeResourceId, JobSubmissionInterface jobSubmissionInterface) {
        Mapper entityMapper = ObjectMapperSingleton.getInstance();
        final JobSubmissionInterfaceEntity entity =
                entityMapper.map(jobSubmissionInterface, JobSubmissionInterfaceEntity.class);
        entity.setComputeResourceId(computeResourceId);
        // merge handles both insert and update of the interface row
        execute(em -> em.merge(entity));
        return entity.getJobSubmissionInterfaceId();
    }
}
| 893 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/GwyResourceProfileRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.model.appcatalog.gatewayprofile.ComputeResourcePreference;
import org.apache.airavata.model.appcatalog.gatewayprofile.GatewayResourceProfile;
import org.apache.airavata.model.appcatalog.gatewayprofile.StoragePreference;
import org.apache.airavata.registry.core.entities.appcatalog.*;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.ObjectMapperSingleton;
import org.apache.airavata.registry.core.utils.QueryConstants;
import org.apache.airavata.registry.cpi.AppCatalogException;
import org.apache.airavata.registry.cpi.GwyResourceProfile;
import org.dozer.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * App-catalog repository for {@link GatewayResourceProfile} records, keyed by gateway id.
 * Besides the inherited CRUD operations, this class manages the profile's child
 * collections: compute-resource preferences (including their SSH account provisioner
 * configuration key/value pairs) and storage preferences.
 */
public class GwyResourceProfileRepository extends AppCatAbstractRepository<GatewayResourceProfile, GatewayProfileEntity, String> implements GwyResourceProfile {

    private final static Logger logger = LoggerFactory.getLogger(GwyResourceProfileRepository.class);

    public GwyResourceProfileRepository() {
        super(GatewayResourceProfile.class, GatewayProfileEntity.class);
    }

    /**
     * Adds (or updates, if one already exists for the gateway id) a gateway resource profile.
     *
     * @return the gateway id of the persisted profile
     */
    @Override
    public String addGatewayResourceProfile(GatewayResourceProfile gatewayResourceProfile) {
        return updateGatewayResourceProfile(gatewayResourceProfile);
    }

    /**
     * Updates a gateway resource profile. The {@code gatewayId} parameter is ignored;
     * the profile carries its own gateway id.
     */
    @Override
    public void updateGatewayResourceProfile(String gatewayId, GatewayResourceProfile updatedProfile) throws AppCatalogException {
        updateGatewayResourceProfile(updatedProfile);
    }

    /**
     * Upserts the given profile: sets the creation timestamp on first insert and the
     * update timestamp on subsequent saves, then persists the profile along with any
     * SSH account provisioner configuration attached to its compute-resource preferences.
     *
     * @return the gateway id of the persisted profile
     */
    public String updateGatewayResourceProfile(GatewayResourceProfile gatewayResourceProfile) {
        String gatewayId = gatewayResourceProfile.getGatewayID();
        Mapper mapper = ObjectMapperSingleton.getInstance();
        GatewayProfileEntity gatewayProfileEntity = mapper.map(gatewayResourceProfile, GatewayProfileEntity.class);
        if (get(gatewayId) != null) {
            gatewayProfileEntity.setUpdateTime(AiravataUtils.getCurrentTimestamp());
        }
        else {
            gatewayProfileEntity.setCreationTime(AiravataUtils.getCurrentTimestamp());
        }
        // Child preference rows must carry the owning gateway id before the merge.
        if (gatewayProfileEntity.getComputeResourcePreferences() != null)
            gatewayProfileEntity.getComputeResourcePreferences().forEach(pref->pref.setGatewayId(gatewayId));
        if (gatewayProfileEntity.getStoragePreferences() != null)
            gatewayProfileEntity.getStoragePreferences().forEach(pref->pref.setGatewayId(gatewayId));
        GatewayProfileEntity persistedCopy = execute(entityManager -> entityManager.merge(gatewayProfileEntity));
        // SSH account provisioner configs live in a separate table; persist them per preference.
        List<ComputeResourcePreference> computeResourcePreferences = gatewayResourceProfile.getComputeResourcePreferences();
        if (computeResourcePreferences != null && !computeResourcePreferences.isEmpty()) {
            for (ComputeResourcePreference preference : computeResourcePreferences) {
                if (preference.getSshAccountProvisionerConfig() != null && !preference.getSshAccountProvisionerConfig().isEmpty()){
                    ComputeResourcePreferenceEntity computeResourcePreferenceEntity = mapper.map(preference, ComputeResourcePreferenceEntity.class);
                    computeResourcePreferenceEntity.setGatewayId(gatewayId);
                    List<SSHAccountProvisionerConfiguration> configurations = new ArrayList<>();
                    // entrySet iteration avoids a second lookup per key (was keySet + get)
                    for (Map.Entry<String, String> configEntry : preference.getSshAccountProvisionerConfig().entrySet()) {
                        configurations.add(new SSHAccountProvisionerConfiguration(configEntry.getKey(), configEntry.getValue(), computeResourcePreferenceEntity));
                    }
                    computeResourcePreferenceEntity.setSshAccountProvisionerConfigurations(configurations);
                    execute(entityManager -> entityManager.merge(computeResourcePreferenceEntity));
                }
            }
        }
        return persistedCopy.getGatewayId();
    }

    /**
     * Fetches the profile for a gateway, populating each compute-resource preference's
     * SSH account provisioner config map.
     *
     * @return the profile, or {@code null} if no profile exists for the gateway id
     *         (previously this method threw a NullPointerException in that case)
     */
    @Override
    public GatewayResourceProfile getGatewayProfile(String gatewayId) {
        GatewayResourceProfile gatewayResourceProfile = get(gatewayId);
        if (gatewayResourceProfile == null) {
            return null;
        }
        if (gatewayResourceProfile.getComputeResourcePreferences() != null && !gatewayResourceProfile.getComputeResourcePreferences().isEmpty()){
            // hoisted out of the loop; the repository is stateless per call
            ComputeResourcePrefRepository computeResourcePrefRepository = new ComputeResourcePrefRepository();
            for (ComputeResourcePreference preference: gatewayResourceProfile.getComputeResourcePreferences()){
                preference.setSshAccountProvisionerConfig(computeResourcePrefRepository.getsshAccountProvisionerConfig(gatewayResourceProfile.getGatewayID(), preference.getComputeResourceId()));
            }
        }
        return gatewayResourceProfile;
    }

    @Override
    public boolean removeGatewayResourceProfile(String gatewayId) throws AppCatalogException {
        return delete(gatewayId);
    }

    /**
     * Returns all gateway resource profiles, each with its compute-resource preferences'
     * SSH account provisioner config maps populated.
     */
    @Override
    public List<GatewayResourceProfile> getAllGatewayProfiles() {
        List<GatewayResourceProfile> gatewayResourceProfileList = select(QueryConstants.FIND_ALL_GATEWAY_PROFILES, 0);
        if (gatewayResourceProfileList != null && !gatewayResourceProfileList.isEmpty()) {
            ComputeResourcePrefRepository computeResourcePrefRepository = new ComputeResourcePrefRepository();
            for (GatewayResourceProfile gatewayResourceProfile: gatewayResourceProfileList) {
                if (gatewayResourceProfile.getComputeResourcePreferences() != null && !gatewayResourceProfile.getComputeResourcePreferences().isEmpty()){
                    for (ComputeResourcePreference preference: gatewayResourceProfile.getComputeResourcePreferences()){
                        preference.setSshAccountProvisionerConfig(computeResourcePrefRepository.getsshAccountProvisionerConfig(gatewayResourceProfile.getGatewayID(), preference.getComputeResourceId()));
                    }
                }
            }
        }
        return gatewayResourceProfileList;
    }

    @Override
    public boolean removeComputeResourcePreferenceFromGateway(String gatewayId, String preferenceId) {
        ComputeResourcePreferencePK computeResourcePreferencePK = new ComputeResourcePreferencePK();
        computeResourcePreferencePK.setGatewayId(gatewayId);
        computeResourcePreferencePK.setComputeResourceId(preferenceId);
        (new ComputeResourcePrefRepository()).delete(computeResourcePreferencePK);
        return true;
    }

    @Override
    public boolean removeDataStoragePreferenceFromGateway(String gatewayId, String preferenceId) {
        StoragePreferencePK storagePreferencePK = new StoragePreferencePK();
        storagePreferencePK.setGatewayId(gatewayId);
        storagePreferencePK.setStorageResourceId(preferenceId);
        (new StoragePrefRepository()).delete(storagePreferencePK);
        return true;
    }

    @Override
    public boolean isGatewayResourceProfileExists(String gatewayId) throws AppCatalogException {
        return isExists(gatewayId);
    }

    /**
     * Looks up a single compute-resource preference, populating its SSH account
     * provisioner config map.
     *
     * @return the preference, or {@code null} if none exists for the gateway/host pair
     *         (previously this method threw a NullPointerException in that case)
     */
    @Override
    public ComputeResourcePreference getComputeResourcePreference(String gatewayId, String hostId) {
        ComputeResourcePreferencePK computeResourcePreferencePK = new ComputeResourcePreferencePK();
        computeResourcePreferencePK.setGatewayId(gatewayId);
        computeResourcePreferencePK.setComputeResourceId(hostId);
        ComputeResourcePrefRepository computeResourcePrefRepository = new ComputeResourcePrefRepository();
        ComputeResourcePreference computeResourcePreference = computeResourcePrefRepository.get(computeResourcePreferencePK);
        if (computeResourcePreference == null) {
            return null;
        }
        computeResourcePreference.setSshAccountProvisionerConfig(computeResourcePrefRepository.getsshAccountProvisionerConfig(gatewayId, hostId));
        return computeResourcePreference;
    }

    @Override
    public StoragePreference getStoragePreference(String gatewayId, String storageId){
        StoragePreferencePK storagePreferencePK = new StoragePreferencePK();
        storagePreferencePK.setStorageResourceId(storageId);
        storagePreferencePK.setGatewayId(gatewayId);
        return (new StoragePrefRepository()).get(storagePreferencePK);
    }

    @Override
    public List<ComputeResourcePreference> getAllComputeResourcePreferences(String gatewayId) {
        Map<String,Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.ComputeResourcePreference.GATEWAY_ID, gatewayId);
        ComputeResourcePrefRepository computeResourcePrefRepository = new ComputeResourcePrefRepository();
        List<ComputeResourcePreference> preferences = computeResourcePrefRepository.select(QueryConstants.FIND_ALL_COMPUTE_RESOURCE_PREFERENCES, -1, 0, queryParameters);
        if (preferences != null && !preferences.isEmpty()) {
            for (ComputeResourcePreference preference: preferences){
                preference.setSshAccountProvisionerConfig(computeResourcePrefRepository.getsshAccountProvisionerConfig(gatewayId, preference.getComputeResourceId()));
            }
        }
        return preferences;
    }

    @Override
    public List<StoragePreference> getAllStoragePreferences(String gatewayId) {
        Map<String,Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.StorageResourcePreference.GATEWAY_ID, gatewayId);
        return (new StoragePrefRepository()).select(QueryConstants.FIND_ALL_STORAGE_RESOURCE_PREFERENCES, -1, 0, queryParameters);
    }

    @Override
    public List<String> getGatewayProfileIds(String gatewayName) throws AppCatalogException {
        //not used anywhere. Skipping the implementation
        return null;
    }
}
| 894 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/BatchQueuePolicyRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.groupresourceprofile.BatchQueueResourcePolicy;
import org.apache.airavata.registry.core.entities.appcatalog.BatchQueueResourcePolicyEntity;
/**
* Created by skariyat on 2/10/18.
*/
/**
 * App-catalog repository exposing CRUD operations for {@link BatchQueueResourcePolicy}
 * records, keyed by the resource policy id (String). All behavior is inherited from
 * {@link AppCatAbstractRepository}; this class only binds the Thrift model type to
 * its JPA entity type.
 */
public class BatchQueuePolicyRepository extends AppCatAbstractRepository<BatchQueueResourcePolicy, BatchQueueResourcePolicyEntity, String> {
    public BatchQueuePolicyRepository() {
        super(BatchQueueResourcePolicy.class, BatchQueueResourcePolicyEntity.class);
    }
}
| 895 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/GroupResourceProfileRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.common.utils.AiravataUtils;
import org.apache.airavata.model.appcatalog.groupresourceprofile.BatchQueueResourcePolicy;
import org.apache.airavata.model.appcatalog.groupresourceprofile.ComputeResourcePolicy;
import org.apache.airavata.model.appcatalog.groupresourceprofile.GroupComputeResourcePreference;
import org.apache.airavata.model.appcatalog.groupresourceprofile.GroupResourceProfile;
import org.apache.airavata.model.commons.airavata_commonsConstants;
import org.apache.airavata.registry.core.entities.appcatalog.ComputeResourceReservationEntity;
import org.apache.airavata.registry.core.entities.appcatalog.GroupComputeResourcePrefEntity;
import org.apache.airavata.registry.core.entities.appcatalog.GroupComputeResourcePrefPK;
import org.apache.airavata.registry.core.entities.appcatalog.GroupResourceProfileEntity;
import org.apache.airavata.registry.core.utils.DBConstants;
import org.apache.airavata.registry.core.utils.QueryConstants;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* Created by skariyat on 2/8/18.
*/
/**
 * App-catalog repository for {@link GroupResourceProfile} records, keyed by the group
 * resource profile id (String). Manages the profile's child collections — group
 * compute-resource preferences (with SSH provisioner configs and reservations),
 * batch-queue resource policies, and compute-resource policies — assigning generated
 * ids to children that arrive without one.
 */
public class GroupResourceProfileRepository extends AppCatAbstractRepository<GroupResourceProfile, GroupResourceProfileEntity, String> {

    public GroupResourceProfileRepository() {
        super(GroupResourceProfile.class, GroupResourceProfileEntity.class);
    }

    /**
     * Creates a new profile: assigns a random UUID id and the current creation time,
     * propagates the id to all child objects, then persists via
     * {@link #updateGroupResourceProfile(GroupResourceProfile)}.
     *
     * @return the generated group resource profile id
     */
    public String addGroupResourceProfile(GroupResourceProfile groupResourceProfile) {
        final String groupResourceProfileId = UUID.randomUUID().toString();
        groupResourceProfile.setGroupResourceProfileId(groupResourceProfileId);
        groupResourceProfile.setCreationTime(System.currentTimeMillis());
        updateChildren(groupResourceProfile, groupResourceProfileId);
        return updateGroupResourceProfile(groupResourceProfile);
    }

    /**
     * Stamps the owning profile id onto every child object and assigns generated ids
     * to reservations and policies whose id is empty or the Thrift default sentinel.
     */
    private void updateChildren(GroupResourceProfile groupResourceProfile, String groupResourceProfileId) {
        if (groupResourceProfile.getComputePreferences() != null) {
            for (GroupComputeResourcePreference groupComputeResourcePreference: groupResourceProfile.getComputePreferences()) {
                groupComputeResourcePreference.setGroupResourceProfileId(groupResourceProfileId);
                if (groupComputeResourcePreference.getGroupSSHAccountProvisionerConfigs() != null) {
                    groupComputeResourcePreference.getGroupSSHAccountProvisionerConfigs().forEach(gssh -> gssh.setGroupResourceProfileId(groupResourceProfileId));
                }
                if (groupComputeResourcePreference.getReservations() != null) {
                    groupComputeResourcePreference.getReservations().forEach(reservation -> {
                        // NOTE(review): assumes getReservationId() is never null here — TODO confirm against callers
                        if (reservation.getReservationId().trim().isEmpty() || reservation.getReservationId().equals(airavata_commonsConstants.DEFAULT_ID)) {
                            reservation.setReservationId(AiravataUtils.getId(reservation.getReservationName()));
                        }
                    });
                }
            }
        }
        if (groupResourceProfile.getBatchQueueResourcePolicies() != null) {
            groupResourceProfile.getBatchQueueResourcePolicies().forEach(bq -> {
                if (bq.getResourcePolicyId().trim().isEmpty() || bq.getResourcePolicyId().equals(airavata_commonsConstants.DEFAULT_ID)) {
                    bq.setResourcePolicyId(UUID.randomUUID().toString());
                }
                bq.setGroupResourceProfileId(groupResourceProfileId);
            });
        }
        if (groupResourceProfile.getComputeResourcePolicies() != null) {
            groupResourceProfile.getComputeResourcePolicies().forEach(cr -> {
                if (cr.getResourcePolicyId().trim().isEmpty() || cr.getResourcePolicyId().equals(airavata_commonsConstants.DEFAULT_ID)) {
                    cr.setResourcePolicyId(UUID.randomUUID().toString());
                }
                cr.setGroupResourceProfileId(groupResourceProfileId);
            });
        }
    }

    /**
     * Upserts a profile: sets the updated time, normalizes child ids, maps to the JPA
     * entity, wires bidirectional child-entity references, and merges.
     *
     * @return the group resource profile id of the persisted profile
     */
    public String updateGroupResourceProfile(GroupResourceProfile updatedGroupResourceProfile) {
        updatedGroupResourceProfile.setUpdatedTime(System.currentTimeMillis());
        updateChildren(updatedGroupResourceProfile, updatedGroupResourceProfile.getGroupResourceProfileId());
        GroupResourceProfileEntity groupResourceProfileEntity = mapToEntity(updatedGroupResourceProfile);
        updateChildrenEntities(groupResourceProfileEntity);
        GroupResourceProfile groupResourceProfile = mergeEntity(groupResourceProfileEntity);
        return groupResourceProfile.getGroupResourceProfileId();
    }

    /**
     * Sets back-references from child entities to their parents. The ordering here is
     * deliberate — see the inline comment about OpenJPA persistence order.
     */
    private void updateChildrenEntities(GroupResourceProfileEntity groupResourceProfileEntity) {
        if (groupResourceProfileEntity.getComputePreferences() != null) {
            for (GroupComputeResourcePrefEntity groupComputeResourcePrefEntity : groupResourceProfileEntity.getComputePreferences()) {
                // For some reason next line is needed to get OpenJPA to persist
                // GroupResourceProfileEntity before GroupComputeResourcePrefEntity
                groupComputeResourcePrefEntity.setGroupResourceProfile(groupResourceProfileEntity);
                if (groupComputeResourcePrefEntity.getReservations() != null) {
                    for (ComputeResourceReservationEntity reservationEntity : groupComputeResourcePrefEntity.getReservations()) {
                        reservationEntity.setGroupComputeResourcePref(groupComputeResourcePrefEntity);
                    }
                }
            }
        }
    }

    /** Fetches a profile by id; returns null if it does not exist. */
    public GroupResourceProfile getGroupResourceProfile(String groupResourceProfileId) {
        GroupResourceProfile groupResourceProfile = get(groupResourceProfileId);
        return groupResourceProfile;
    }

    public boolean removeGroupResourceProfile(String groupResourceProfileId) {
        return delete(groupResourceProfileId);
    }

    public boolean isGroupResourceProfileExists(String groupResourceProfileId) {
        return isExists(groupResourceProfileId);
    }

    /**
     * Returns the profiles of a gateway restricted to the given accessible profile ids.
     * Returns an empty list when no accessible ids are supplied (access-controlled:
     * no ids means no visibility, not "all profiles").
     */
    public List<GroupResourceProfile> getAllGroupResourceProfiles(String gatewayId, List<String> accessibleGroupResProfileIds) {
        Map<String,Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.GroupResourceProfile.GATEWAY_ID, gatewayId);
        if (accessibleGroupResProfileIds != null && !accessibleGroupResProfileIds.isEmpty()) {
            queryParameters.put(DBConstants.GroupResourceProfile.ACCESSIBLE_GROUP_RESOURCE_IDS, accessibleGroupResProfileIds);
            return select(QueryConstants.FIND_ACCESSIBLE_GROUP_RESOURCE_PROFILES, -1, 0, queryParameters);
        } else {
            return Collections.emptyList();
        }
    }

    /** Deletes one group compute-resource preference by its composite key. */
    public boolean removeGroupComputeResourcePreference(String computeResourceId, String groupResourceProfileId) {
        GroupComputeResourcePrefPK groupComputeResourcePrefPK = new GroupComputeResourcePrefPK();
        groupComputeResourcePrefPK.setComputeResourceId(computeResourceId);
        groupComputeResourcePrefPK.setGroupResourceProfileId(groupResourceProfileId);
        return (new GrpComputePrefRepository().delete(groupComputeResourcePrefPK));
    }

    public boolean removeComputeResourcePolicy(String resourcePolicyId) {
        return (new ComputeResourcePolicyRepository().delete(resourcePolicyId));
    }

    public boolean removeBatchQueueResourcePolicy(String resourcePolicyId) {
        return (new BatchQueuePolicyRepository().delete(resourcePolicyId));
    }

    /** Fetches one group compute-resource preference by its composite key; null if absent. */
    public GroupComputeResourcePreference getGroupComputeResourcePreference(String computeResourceId, String groupResourceProfileId) {
        GroupComputeResourcePrefPK groupComputeResourcePrefPK = new GroupComputeResourcePrefPK();
        groupComputeResourcePrefPK.setGroupResourceProfileId(groupResourceProfileId);
        groupComputeResourcePrefPK.setComputeResourceId(computeResourceId);
        return (new GrpComputePrefRepository().get(groupComputeResourcePrefPK));
    }

    public boolean isGroupComputeResourcePreferenceExists(String computeResourceId, String groupResourceProfileId) {
        GroupComputeResourcePrefPK groupComputeResourcePrefPK = new GroupComputeResourcePrefPK();
        groupComputeResourcePrefPK.setGroupResourceProfileId(groupResourceProfileId);
        groupComputeResourcePrefPK.setComputeResourceId(computeResourceId);
        return (new GrpComputePrefRepository().isExists(groupComputeResourcePrefPK));
    }

    public ComputeResourcePolicy getComputeResourcePolicy(String resourcePolicyId) {
        return (new ComputeResourcePolicyRepository().get(resourcePolicyId));
    }

    public BatchQueueResourcePolicy getBatchQueueResourcePolicy(String resourcePolicyId) {
        return (new BatchQueuePolicyRepository().get(resourcePolicyId));
    }

    /** Lists all compute-resource preferences belonging to one profile. */
    public List<GroupComputeResourcePreference> getAllGroupComputeResourcePreferences(String groupResourceProfileId) {
        Map<String,Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.GroupResourceProfile.GROUP_RESOURCE_PROFILE_ID, groupResourceProfileId);
        List<GroupComputeResourcePreference> groupComputeResourcePreferenceList = (new GrpComputePrefRepository().select(QueryConstants.FIND_ALL_GROUP_COMPUTE_PREFERENCES, -1, 0, queryParameters));
        return groupComputeResourcePreferenceList;
    }

    /** Lists all batch-queue resource policies belonging to one profile. */
    public List<BatchQueueResourcePolicy> getAllGroupBatchQueueResourcePolicies(String groupResourceProfileId) {
        Map<String,Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.GroupResourceProfile.GROUP_RESOURCE_PROFILE_ID, groupResourceProfileId);
        return (new BatchQueuePolicyRepository().select(QueryConstants.FIND_ALL_GROUP_BATCH_QUEUE_RESOURCE_POLICY, -1, 0, queryParameters));
    }

    /** Lists all compute-resource policies belonging to one profile. */
    public List<ComputeResourcePolicy> getAllGroupComputeResourcePolicies(String groupResourceProfileId) {
        Map<String,Object> queryParameters = new HashMap<>();
        queryParameters.put(DBConstants.GroupResourceProfile.GROUP_RESOURCE_PROFILE_ID, groupResourceProfileId);
        return (new ComputeResourcePolicyRepository().select(QueryConstants.FIND_ALL_GROUP_COMPUTE_RESOURCE_POLICY, -1, 0, queryParameters));
    }
}
| 896 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/ApplicationModuleRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.appdeployment.ApplicationModule;
import org.apache.airavata.registry.core.entities.appcatalog.ApplicationModuleEntity;
/**
 * App-catalog repository exposing CRUD operations for {@link ApplicationModule}
 * records, keyed by the module id (String). All behavior is inherited from
 * {@link AppCatAbstractRepository}; this class only binds the Thrift model type
 * to its JPA entity type.
 */
public class ApplicationModuleRepository extends AppCatAbstractRepository<ApplicationModule, ApplicationModuleEntity, String> {
    public ApplicationModuleRepository () {
        super(ApplicationModule.class, ApplicationModuleEntity.class);
    }
}
| 897 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/BatchQueueRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.computeresource.BatchQueue;
import org.apache.airavata.registry.core.entities.appcatalog.BatchQueueEntity;
import org.apache.airavata.registry.core.entities.appcatalog.BatchQueuePK;
/**
 * App-catalog repository exposing CRUD operations for {@link BatchQueue} records,
 * keyed by the composite {@link BatchQueuePK} (compute resource id, queue name).
 * All behavior is inherited from {@link AppCatAbstractRepository}; this class only
 * binds the Thrift model type to its JPA entity type.
 */
public class BatchQueueRepository extends AppCatAbstractRepository<BatchQueue, BatchQueueEntity, BatchQueuePK> {
    public BatchQueueRepository() {
        super(BatchQueue.class, BatchQueueEntity.class);
    }
}
| 898 |
0 | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories | Create_ds/airavata/modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/repositories/appcatalog/GrpComputePrefRepository.java | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.registry.core.repositories.appcatalog;
import org.apache.airavata.model.appcatalog.groupresourceprofile.GroupComputeResourcePreference;
import org.apache.airavata.registry.core.entities.appcatalog.GroupComputeResourcePrefEntity;
import org.apache.airavata.registry.core.entities.appcatalog.GroupComputeResourcePrefPK;
/**
* Created by skariyat on 2/10/18.
*/
/**
 * App-catalog repository exposing CRUD operations for
 * {@link GroupComputeResourcePreference} records, keyed by the composite
 * {@link GroupComputeResourcePrefPK} (group resource profile id, compute resource id).
 * All behavior is inherited from {@link AppCatAbstractRepository}; this class only
 * binds the Thrift model type to its JPA entity type.
 */
public class GrpComputePrefRepository extends AppCatAbstractRepository<GroupComputeResourcePreference, GroupComputeResourcePrefEntity, GroupComputeResourcePrefPK> {
    public GrpComputePrefRepository() {
        super(GroupComputeResourcePreference.class, GroupComputeResourcePrefEntity.class);
    }
}
| 899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.