repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/exception/UserException.java | src/main/java/org/ohdsi/webapi/exception/UserException.java | package org.ohdsi.webapi.exception;
/**
 * Unchecked exception representing an error attributable to user input or a
 * user-visible condition. Mirrors the constructor set of the sibling
 * {@code AtlasException} in this package for consistency.
 */
public class UserException extends RuntimeException {

    /**
     * Creates an exception with no message or cause.
     * Added for parity with {@code AtlasException}'s no-arg constructor.
     */
    public UserException() {
    }

    /**
     * Creates an exception with a detail message and an underlying cause.
     *
     * @param message human-readable description of the error
     * @param cause   the originating throwable, preserved for diagnostics
     */
    public UserException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates an exception with only a detail message.
     *
     * @param message human-readable description of the error
     */
    public UserException(String message) {
        super(message);
    }

    /**
     * Wraps an underlying throwable without an additional message.
     *
     * @param ex the cause to wrap
     */
    public UserException(Throwable ex) {
        super(ex);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/exception/AtlasException.java | src/main/java/org/ohdsi/webapi/exception/AtlasException.java | package org.ohdsi.webapi.exception;
/**
 * Base unchecked exception for Atlas/WebAPI application errors.
 * Provides the full set of standard {@link RuntimeException} constructors.
 */
public class AtlasException extends RuntimeException {

    /** Wraps an underlying throwable without an additional message. */
    public AtlasException(Throwable ex) {
        super(ex);
    }

    /** Creates an exception carrying both a detail message and its cause. */
    public AtlasException(String message, Throwable cause) {
        super(message, cause);
    }

    /** Creates an exception with only a detail message. */
    public AtlasException(String message) {
        super(message);
    }

    /** Creates an exception with neither message nor cause. */
    public AtlasException() {
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/exception/BadRequestAtlasException.java | src/main/java/org/ohdsi/webapi/exception/BadRequestAtlasException.java | package org.ohdsi.webapi.exception;
// A dedicated BadRequest type is used to avoid breaking changes with the existing
// BadRequest class from javax.ws.rs.
/**
 * {@code AtlasException} subtype signalling a malformed or invalid client request.
 */
public class BadRequestAtlasException extends AtlasException {

    /** Wraps an underlying cause without an extra message. */
    public BadRequestAtlasException(Throwable ex) {
        super(ex);
    }

    /** Creates an exception carrying both a detail message and its cause. */
    public BadRequestAtlasException(String message, Throwable cause) {
        super(message, cause);
    }

    /** Creates an exception with only a detail message. */
    public BadRequestAtlasException(String message) {
        super(message);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/SourceDetails.java | src/main/java/org/ohdsi/webapi/source/SourceDetails.java | package org.ohdsi.webapi.source;
/**
 * Connection-level view of a {@code Source}: JDBC connection string and
 * authentication details. The real password is never exposed — it is replaced
 * by {@code Source.MASQUERADED_PASSWORD} in the constructor.
 */
public class SourceDetails extends SourceInfo {

    private String connectionString;
    private String username;
    private String password;
    private String krbAuthMethod;
    private String keyfileName;
    private String krbAdminServer;

    /**
     * Copies connection metadata from the given source.
     *
     * @param s the source entity to expose details for
     */
    public SourceDetails(Source s) {
        super(s);
        setConnectionString(s.getSourceConnection());
        setKeyfileName(s.getKeyfileName());
        // Fix: use Locale.ROOT so lower-casing is locale-independent (the default
        // locale-sensitive toLowerCase() mangles 'I' under e.g. the Turkish locale).
        // NOTE(review): assumes s.getKrbAuthMethod() is never null — confirm upstream.
        setKrbAuthMethod(s.getKrbAuthMethod().toString().toLowerCase(java.util.Locale.ROOT));
        setUsername(s.getUsername());
        // Mask the stored password so it is never serialized to clients.
        setPassword(Source.MASQUERADED_PASSWORD);
        setKrbAdminServer(s.getKrbAdminServer());
    }

    /** @return the JDBC connection string */
    public String getConnectionString() {
        return connectionString;
    }

    public void setConnectionString(String connectionString) {
        this.connectionString = connectionString;
    }

    /** @return name of the Kerberos keyfile, if any */
    public String getKeyfileName() {
        return keyfileName;
    }

    public void setKeyfileName(String keyfileName) {
        this.keyfileName = keyfileName;
    }

    /** @return the Kerberos auth method, lower-cased */
    public String getKrbAuthMethod() {
        return krbAuthMethod;
    }

    public void setKrbAuthMethod(String krbAuthMethod) {
        this.krbAuthMethod = krbAuthMethod;
    }

    /** @return the database username */
    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    /** @return the masqueraded password placeholder, never the real secret */
    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    /** @return the Kerberos admin server host */
    public String getKrbAdminServer() {
        return krbAdminServer;
    }

    public void setKrbAdminServer(String krbAdminServer) {
        this.krbAdminServer = krbAdminServer;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/SourceInfo.java | src/main/java/org/ohdsi/webapi/source/SourceInfo.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.source;
import java.util.Collection;
/**
 * Lightweight description of a CDM source: identifier, display name, SQL
 * dialect, source key, and the daimons attached to it. Fields are public and
 * copied directly from the backing {@code Source} entity.
 *
 * @author fdefalco
 */
public class SourceInfo {
    public int sourceId;
    public String sourceName;
    public String sourceDialect;
    public String sourceKey;
    public Collection<SourceDaimon> daimons;

    /**
     * Snapshots the identifying attributes of the given source entity.
     *
     * @param s the source to describe
     */
    public SourceInfo(Source s) {
        this.sourceKey = s.getSourceKey();
        this.sourceId = s.getSourceId();
        this.sourceName = s.getSourceName();
        this.sourceDialect = s.getSourceDialect();
        this.daimons = s.getDaimons();
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/SourceService.java | src/main/java/org/ohdsi/webapi/source/SourceService.java | package org.ohdsi.webapi.source;
import org.apache.commons.collections4.map.PassiveExpiringMap;
import org.jasypt.encryption.pbe.PBEStringEncryptor;
import org.jasypt.properties.PropertyValueEncryptionUtils;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.common.SourceMapKey;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.ohdsi.webapi.shiro.management.datasource.SourceAccessor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.CannotGetJdbcConnectionException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import javax.annotation.PostConstruct;
import java.util.*;
import java.util.stream.Collectors;
import javax.cache.CacheManager;
import javax.cache.configuration.MutableConfiguration;
import org.ohdsi.webapi.util.CacheHelper;
import org.springframework.boot.autoconfigure.cache.JCacheManagerCustomizer;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Component;
@Service
public class SourceService extends AbstractDaoService {

    /**
     * Registers the JCache instance that backs the cached source list
     * (see {@code getSources()} / {@code invalidateCache()}).
     */
    @Component
    public static class CachingSetup implements JCacheManagerCustomizer {

        public static final String SOURCE_LIST_CACHE = "sourceList";

        @Override
        public void customize(CacheManager cacheManager) {
            // Evict when a cohort definition is created or updated, or permissions, or tags
            if (!CacheHelper.getCacheNames(cacheManager).contains(SOURCE_LIST_CACHE)) {
                cacheManager.createCache(SOURCE_LIST_CACHE, new MutableConfiguration<Object, List<Source>>()
                        .setTypes(Object.class, (Class<List<Source>>) (Class<?>) List.class)
                        // store-by-reference: cached List<Source> must not be mutated by callers
                        .setStoreByValue(false)
                        .setStatisticsEnabled(true));
            }
        }
    }

    // When true, credentials in the source table are encrypted at rest via jasypt.
    @Value("${jasypt.encryptor.enabled}")
    private boolean encryptorEnabled;

    // Schema holding the WebAPI 'source' table; substituted into raw SQL below.
    @Value("${datasource.ohdsi.schema}")
    private String schema;

    // Per-source connectivity results; entries expire after 5000 ms (PassiveExpiringMap TTL).
    private Map<Source, Boolean> connectionAvailability = Collections.synchronizedMap(new PassiveExpiringMap<>(5000));

    private final SourceRepository sourceRepository;
    private final JdbcTemplate jdbcTemplate;
    private PBEStringEncryptor defaultStringEncryptor;
    private SourceAccessor sourceAccessor;

    public SourceService(SourceRepository sourceRepository, JdbcTemplate jdbcTemplate, PBEStringEncryptor defaultStringEncryptor, SourceAccessor sourceAccessor) {
        this.sourceRepository = sourceRepository;
        this.jdbcTemplate = jdbcTemplate;
        this.defaultStringEncryptor = defaultStringEncryptor;
        this.sourceAccessor = sourceAccessor;
    }

    @PostConstruct
    private void postConstruct() {
        // Encrypt any plaintext credentials as soon as the bean is wired up.
        ensureSourceEncrypted();
    }

    /**
     * When encryption is enabled, rewrites any plaintext username/password rows
     * in the source table as ENC(...) values using the default encryptor.
     * Runs in a fresh transaction; already-encrypted values are left untouched.
     */
    public void ensureSourceEncrypted() {
        if (encryptorEnabled) {
            String query = "SELECT source_id, username, password FROM ${schema}.source".replaceAll("\\$\\{schema\\}", schema);
            String update = "UPDATE ${schema}.source SET username = ?, password = ? WHERE source_id = ?".replaceAll("\\$\\{schema\\}", schema);
            getTransactionTemplateRequiresNew().execute(new TransactionCallbackWithoutResult() {
                @Override
                protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) {
                    jdbcTemplate.query(query, rs -> {
                        int id = rs.getInt("source_id");
                        String username = rs.getString("username");
                        String password = rs.getString("password");
                        // Only wrap values that are not already in ENC(...) form.
                        if (username != null && !PropertyValueEncryptionUtils.isEncryptedValue(username)) {
                            username = "ENC(" + defaultStringEncryptor.encrypt(username) + ")";
                        }
                        if (password != null && !PropertyValueEncryptionUtils.isEncryptedValue(password)) {
                            password = "ENC(" + defaultStringEncryptor.encrypt(password) + ")";
                        }
                        // NOTE(review): rows are rewritten even when unchanged — confirm acceptable.
                        jdbcTemplate.update(update, username, password, id);
                    });
                }
            });
        }
    }

    /**
     * All sources, sorted by source key ascending. Result is cached in
     * SOURCE_LIST_CACHE until {@code invalidateCache()} is called.
     */
    @Cacheable(cacheNames = CachingSetup.SOURCE_LIST_CACHE)
    public Collection<Source> getSources() {
        List<Source> sources = sourceRepository.findAll();
        Collections.sort(sources, new SortByKey());
        return sources;
    }

    public Source findBySourceKey(final String sourceKey) {
        return sourceRepository.findBySourceKey(sourceKey);
    }

    public Source findBySourceId(final Integer sourceId) {
        return sourceRepository.findBySourceId(sourceId);
    }

    /**
     * Indexes all sources by the key extracted via the given map-key strategy.
     */
    public <T> Map<T, Source> getSourcesMap(SourceMapKey<T> mapKey) {
        return getSources().stream().collect(Collectors.toMap(mapKey.getKeyFunc(), s -> s));
    }

    /**
     * Probes connectivity by running a dialect-translated "select 1" against the
     * source, but only when the source is flagged for connection checking.
     * The trailing semicolon is stripped from the translated SQL before execution.
     */
    public void checkConnection(Source source) {
        if (source.isCheckConnection()) {
            final JdbcTemplate jdbcTemplate = getSourceJdbcTemplate(source);
            jdbcTemplate.execute(SqlTranslate.translateSql("select 1;", source.getSourceDialect()).replaceAll(";$", ""));
        }
    }

    /**
     * Returns the highest-priority source for the given daimon type that the
     * current user can access and whose connection is available, or null if none.
     * Connectivity results come from the short-lived connectionAvailability cache.
     */
    public Source getPrioritySourceForDaimon(SourceDaimon.DaimonType daimonType) {
        // (sic: the repository method name contains typos; kept as-is for callers)
        List<Source> sourcesByDaimonPriority = sourceRepository.findAllSortedByDiamonPrioirty(daimonType);
        for (Source source : sourcesByDaimonPriority) {
            if (!(sourceAccessor.hasAccess(source) && connectionAvailability.computeIfAbsent(source, this::checkConnectionSafe))) {
                continue;
            }
            return source;
        }
        return null;
    }

    /**
     * For every daimon type, resolves the first accessible-and-reachable source
     * in priority order. Types with no such source are absent from the result.
     */
    public Map<SourceDaimon.DaimonType, Source> getPriorityDaimons() {
        // Local helper memoizes access/connectivity checks per source id so a
        // source shared across daimon types is only probed once.
        class SourceValidator {
            private Map<Integer, Boolean> checkedSources = new HashMap<>();

            // (sic: method name typo kept — local class, no external callers)
            private boolean isSourceAvaialble(Source source) {
                return checkedSources.computeIfAbsent(source.getSourceId(),
                        v -> sourceAccessor.hasAccess(source) && connectionAvailability.computeIfAbsent(source, SourceService.this::checkConnectionSafe));
            }
        }
        SourceValidator sourceValidator = new SourceValidator();
        Map<SourceDaimon.DaimonType, Source> priorityDaimons = new HashMap<>();
        Arrays.asList(SourceDaimon.DaimonType.values()).forEach(d -> {
            List<Source> sources = sourceRepository.findAllSortedByDiamonPrioirty(d);
            Optional<Source> source = sources.stream().filter(sourceValidator::isSourceAvaialble)
                    .findFirst();
            source.ifPresent(s -> priorityDaimons.put(d, s));
        });
        return priorityDaimons;
    }

    public Source getPriorityVocabularySource() {
        return getPrioritySourceForDaimon(SourceDaimon.DaimonType.Vocabulary);
    }

    /** Null-safe variant returning a SourceInfo view, or null when unresolved. */
    public SourceInfo getPriorityVocabularySourceInfo() {
        Source source = getPrioritySourceForDaimon(SourceDaimon.DaimonType.Vocabulary);
        if (source == null) {
            return null;
        }
        return new SourceInfo(source);
    }

    /** Empties the cached source list; body intentionally empty — the annotation does the work. */
    @CacheEvict(cacheNames = CachingSetup.SOURCE_LIST_CACHE, allEntries = true)
    public void invalidateCache() {
    }

    /**
     * Connectivity probe that maps connection failures to false.
     * NOTE(review): only CannotGetJdbcConnectionException is caught — any other
     * runtime exception from checkConnection propagates to the caller.
     */
    private boolean checkConnectionSafe(Source source) {
        try {
            checkConnection(source);
            return true;
        } catch (CannotGetJdbcConnectionException ex) {
            return false;
        }
    }

    /** Orders sources by source key, ascending by default. */
    private class SortByKey implements Comparator<Source> {
        private boolean isAscending;

        public SortByKey(boolean ascending) {
            isAscending = ascending;
        }

        public SortByKey() {
            this(true);
        }

        public int compare(Source s1, Source s2) {
            return s1.getSourceKey().compareTo(s2.getSourceKey()) * (isAscending ? 1 : -1);
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/package-info.java | src/main/java/org/ohdsi/webapi/source/package-info.java | /*
*
* Copyright 2017 Observational Health Data Sciences and Informatics
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Company: Odysseus Data Services, Inc.
* Product Owner/Architecture: Gregory Klebanov
* Authors: Pavel Grafkin, Alexander Saltykov, Vitaly Koulakov, Anton Gackovka, Alexandr Ryabokon, Mikhail Mironov
* Created: Dec 1, 2017
*
*/
// Registers the Hibernate user type "encryptedString" for entities in this package.
// It is backed by CheckedEncryptedStringType, configured with the jasypt encryptor
// registered under the name "defaultStringEncryptor" (presumably encrypting mapped
// String columns at rest — confirm against CheckedEncryptedStringType).
@TypeDefs({
        @TypeDef(
                name = "encryptedString",
                typeClass = CheckedEncryptedStringType.class,
                parameters = {
                        @Parameter(name="encryptorRegisteredName", value="defaultStringEncryptor")
                }
        )
})
package org.ohdsi.webapi.source;

import org.hibernate.annotations.Parameter;
import org.hibernate.annotations.TypeDef;
import org.hibernate.annotations.TypeDefs;
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/SourceController.java | src/main/java/org/ohdsi/webapi/source/SourceController.java | package org.ohdsi.webapi.source;
import com.odysseusinc.arachne.commons.types.DBMSType;
import com.odysseusinc.logging.event.AddDataSourceEvent;
import com.odysseusinc.logging.event.ChangeDataSourceEvent;
import com.odysseusinc.logging.event.DeleteDataSourceEvent;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
import org.ohdsi.webapi.exception.SourceDuplicateKeyException;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.ohdsi.webapi.service.VocabularyService;
import org.ohdsi.webapi.shiro.management.Security;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.jdbc.CannotGetJdbcConnectionException;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.PersistenceException;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.stream.Collectors;
import org.springframework.cache.annotation.CacheEvict;
@Path("/source/")
@Component
@Transactional
public class SourceController extends AbstractDaoService {

  public static final String SECURE_MODE_ERROR = "This feature requires the administrator to enable security for the application";

  @Autowired
  private ApplicationEventPublisher publisher;
  @Autowired
  private VocabularyService vocabularyService;
  @Autowired
  private SourceService sourceService;
  @Autowired
  private SourceRepository sourceRepository;
  @Autowired
  private SourceDaimonRepository sourceDaimonRepository;
  @Autowired
  private GenericConversionService conversionService;
  @Autowired
  private Security securityManager;

  // True unless the 'DisabledSecurity' provider is configured.
  @Value("#{!'${security.provider}'.equals('DisabledSecurity')}")
  private boolean securityEnabled;

  /**
   * Gets the list of all Sources in WebAPI database. Sources with a non-null
   * deleted_date are not returned (ie: these are soft deleted)
   *
   * @summary Get Sources
   * @return A list of all CDM sources with the ID, name, SQL dialect, and key
   * for each source. The {sourceKey} is used in other WebAPI endpoints to
   * identify CDMs.
   */
  @Path("sources")
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public Collection<SourceInfo> getSources() {
    return sourceService.getSources().stream().map(SourceInfo::new).collect(Collectors.toList());
  }

  /**
   * Refresh cached CDM database metadata
   *
   * @summary Refresh Sources
   * @return A list of all CDM sources with the ID, name, SQL dialect, and key
   * for each source (same as the 'sources' endpoint) after refreshing the cached sourced data.
   */
  @Path("refresh")
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public Collection<SourceInfo> refreshSources() {
    // Drop both caches, then re-run credential encryption before re-reading.
    sourceService.invalidateCache();
    vocabularyService.clearVocabularyInfoCache();
    sourceService.ensureSourceEncrypted();
    return getSources();
  }

  /**
   * Get the priority vocabulary source.
   *
   * WebAPI designates one CDM vocabulary as the priority vocabulary to be used for vocabulary searches in Atlas.
   *
   * @summary Get Priority Vocabulary Source
   * @return The CDM metadata for the priority vocabulary.
   */
  @Path("priorityVocabulary")
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public SourceInfo getPriorityVocabularySourceInfo() {
    return sourceService.getPriorityVocabularySourceInfo();
  }

  /**
   * Get source by key
   * @summary Get Source By Key
   * @param sourceKey the unique source key
   * @return Metadata for a single Source that matches the <code>sourceKey</code>.
   */
  @Path("{key}")
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public SourceInfo getSource(@PathParam("key") final String sourceKey) {
    // NOTE(review): findBySourceKey appears to return null for unknown keys
    // (see the null checks elsewhere in this class) — this would NPE instead
    // of returning 404; confirm intended behavior.
    return sourceRepository.findBySourceKey(sourceKey).getSourceInfo();
  }

  /**
   * Get Source Details
   *
   * Source Details contains connection-specific information like JDBC url and authentication information.
   * @summary Get Source Details
   * @param sourceId the source identifier
   * @return connection details for the source (password masqueraded)
   */
  @Path("details/{sourceId}")
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public SourceDetails getSourceDetails(@PathParam("sourceId") Integer sourceId) {
    if (!securityEnabled) {
      throw new NotAuthorizedException(SECURE_MODE_ERROR);
    }
    Source source = sourceRepository.findBySourceId(sourceId);
    return new SourceDetails(source);
  }

  /**
   * Create a Source
   *
   * @summary Create Source
   * @param file the keyfile
   * @param fileDetail the keyfile details
   * @param request contains the source information (name, key, etc)
   * @return a new SourceInfo for the created source
   * @throws Exception
   */
  @Path("")
  @POST
  @Consumes(MediaType.MULTIPART_FORM_DATA)
  @Produces(MediaType.APPLICATION_JSON)
  @CacheEvict(cacheNames = SourceService.CachingSetup.SOURCE_LIST_CACHE, allEntries = true)
  public SourceInfo createSource(@FormDataParam("keyfile") InputStream file, @FormDataParam("keyfile") FormDataContentDisposition fileDetail, @FormDataParam("source") SourceRequest request) throws Exception {
    if (!securityEnabled) {
      throw new NotAuthorizedException(SECURE_MODE_ERROR);
    }
    // Reject duplicate source keys up front (a second check on persistence
    // failure below catches races / DB-level constraint violations).
    Source sourceByKey = sourceRepository.findBySourceKey(request.getKey());
    if (Objects.nonNull(sourceByKey)) {
      throw new SourceDuplicateKeyException("The source key has been already used.");
    }
    Source source = conversionService.convert(request, Source.class);
    if(source.getDaimons() != null) {
      // First source should get priority = 1
      Iterable<Source> sources = sourceRepository.findAll();
      source.getDaimons()
              .stream()
              .filter(sd -> sd.getPriority() <= 0)
              .filter(sd -> {
                boolean accept = true;
                // Check if source daimon of given type with priority > 0 already exists in other sources
                for(Source innerSource: sources) {
                  // NOTE(review): '!...anyMatch(...)' is equivalent to noneMatch(...);
                  // kept as-is here (doc-only change).
                  accept = !innerSource.getDaimons()
                          .stream()
                          .anyMatch(innerDaimon -> innerDaimon.getPriority() > 0
                                  && innerDaimon.getDaimonType().equals(sd.getDaimonType()));
                  if(!accept) {
                    break;
                  }
                }
                return accept;
              })
              .forEach(sd -> sd.setPriority(1));
    }
    // Empty baseline (only the dialect set) so setKeyfileData compares the new
    // keyfile name against "no previous keyfile".
    Source original = new Source();
    original.setSourceDialect(source.getSourceDialect());
    setKeyfileData(source, original, file);
    source.setCreatedBy(getCurrentUser());
    source.setCreatedDate(new Date());
    try {
      Source saved = sourceRepository.saveAndFlush(source);
      sourceService.invalidateCache();
      SourceInfo sourceInfo = new SourceInfo(saved);
      publisher.publishEvent(new AddDataSourceEvent(this, source.getSourceId(), source.getSourceName()));
      return sourceInfo;
    } catch (PersistenceException ex) {
      // saveAndFlush surfaces constraint violations here; treat as duplicate key.
      throw new SourceDuplicateKeyException("You cannot use this Source Key, please use different one");
    }
  }

  /**
   * Updates a Source with the provided details from multiple files
   *
   * @summary Update Source
   * @param file the keyfile
   * @param fileDetail the keyfile details
   * @param request contains the source information (name, key, etc)
   * @return the updated SourceInfo for the source
   * @throws Exception
   */
  @Path("{sourceId}")
  @PUT
  @Consumes(MediaType.MULTIPART_FORM_DATA)
  @Produces(MediaType.APPLICATION_JSON)
  @Transactional
  @CacheEvict(cacheNames = SourceService.CachingSetup.SOURCE_LIST_CACHE, allEntries = true)
  public SourceInfo updateSource(@PathParam("sourceId") Integer sourceId, @FormDataParam("keyfile") InputStream file, @FormDataParam("keyfile") FormDataContentDisposition fileDetail, @FormDataParam("source") SourceRequest request) throws IOException {
    if (!securityEnabled) {
      throw new NotAuthorizedException(SECURE_MODE_ERROR);
    }
    Source updated = conversionService.convert(request, Source.class);
    Source source = sourceRepository.findBySourceId(sourceId);
    if (source != null) {
      updated.setSourceId(sourceId);
      // The source key is immutable after creation.
      updated.setSourceKey(source.getSourceKey());
      // Blank or masqueraded credentials mean "keep the stored ones".
      if (StringUtils.isBlank(updated.getUsername()) ||
              Objects.equals(updated.getUsername().trim(), Source.MASQUERADED_USERNAME)) {
        updated.setUsername(source.getUsername());
      }
      if (StringUtils.isBlank(updated.getPassword()) ||
              Objects.equals(updated.getPassword().trim(), Source.MASQUERADED_PASSWORD)) {
        updated.setPassword(source.getPassword());
      }
      setKeyfileData(updated, source, file);
      transformIfRequired(updated);
      if (request.isCheckConnection() == null) {
        updated.setCheckConnection(source.isCheckConnection());
      }
      updated.setModifiedBy(getCurrentUser());
      updated.setModifiedDate(new Date());
      reuseDeletedDaimons(updated, source);
      List<SourceDaimon> removed = source.getDaimons().stream().filter(d -> !updated.getDaimons().contains(d))
              .collect(Collectors.toList());
      // Delete MUST be called after fetching user or source data to prevent autoflush (see DefaultPersistEventListener.onPersist)
      sourceDaimonRepository.delete(removed);
      Source result = sourceRepository.save(updated);
      publisher.publishEvent(new ChangeDataSourceEvent(this, updated.getSourceId(), updated.getSourceName()));
      sourceService.invalidateCache();
      return new SourceInfo(result);
    } else {
      throw new NotFoundException();
    }
  }

  /**
   * Rebuilds 'updated''s daimon list so that daimons which already exist on the
   * stored source (including soft-deleted rows fetched via findBySource) are
   * reused — with priority and table qualifier refreshed — instead of inserted
   * as new rows.
   */
  private void reuseDeletedDaimons(Source updated, Source source) {
    // Daimons present on both sides are kept as-is.
    List<SourceDaimon> daimons = updated.getDaimons().stream().filter(d -> source.getDaimons().contains(d))
            .collect(Collectors.toList());
    List<SourceDaimon> newDaimons = updated.getDaimons().stream().filter(d -> !source.getDaimons().contains(d))
            .collect(Collectors.toList());
    List<SourceDaimon> allDaimons = sourceDaimonRepository.findBySource(source);
    for (SourceDaimon newSourceDaimon: newDaimons) {
      Optional<SourceDaimon> reusedDaimonOpt = allDaimons.stream()
              .filter(d -> d.equals(newSourceDaimon))
              .findFirst();
      if (reusedDaimonOpt.isPresent()) {
        SourceDaimon reusedDaimon = reusedDaimonOpt.get();
        reusedDaimon.setPriority(newSourceDaimon.getPriority());
        reusedDaimon.setTableQualifier(newSourceDaimon.getTableQualifier());
        daimons.add(reusedDaimon);
      } else {
        daimons.add(newSourceDaimon);
      }
    }
    updated.setDaimons(daimons);
  }

  /**
   * For BigQuery sources carrying an uploaded keyfile, strips any
   * OAuthPvtKeyPath=... fragment from the connection string (the path will be
   * resolved from the stored keyfile instead).
   */
  private void transformIfRequired(Source source) {
    if (DBMSType.BIGQUERY.getOhdsiDB().equals(source.getSourceDialect()) && ArrayUtils.isNotEmpty(source.getKeyfile())) {
      String connStr = source.getSourceConnection().replaceAll("OAuthPvtKeyPath=.+?(;|\\z)", "");
      source.setSourceConnection(connStr);
    }
  }

  /**
   * Populates 'updated''s keyfile bytes: reads the uploaded stream when the
   * keyfile name changed, reuses the stored bytes when it is unchanged, and
   * clears keyfile data entirely when the dialect does not support keyfiles or
   * no keyfile name was supplied.
   */
  private void setKeyfileData(Source updated, Source source, InputStream file) throws IOException {
    if (source.supportsKeyfile()) {
      if (updated.getKeyfileName() != null) {
        if (!Objects.equals(updated.getKeyfileName(), source.getKeyfileName())) {
          byte[] fileBytes = IOUtils.toByteArray(file);
          updated.setKeyfile(fileBytes);
        } else {
          updated.setKeyfile(source.getKeyfile());
        }
        return;
      }
    }
    updated.setKeyfile(null);
    updated.setKeyfileName(null);
  }

  /**
   * Delete a source.
   *
   * @summary Delete Source
   * @param sourceId the source identifier
   * @return 200 OK on success; 401 when security is disabled
   * @throws Exception
   */
  @Path("{sourceId}")
  @DELETE
  @Transactional
  @CacheEvict(cacheNames = SourceService.CachingSetup.SOURCE_LIST_CACHE, allEntries = true)
  public Response delete(@PathParam("sourceId") Integer sourceId) throws Exception {
    if (!securityEnabled){
      return getInsecureModeResponse();
    }
    Source source = sourceRepository.findBySourceId(sourceId);
    if (source != null) {
      sourceRepository.delete(source);
      publisher.publishEvent(new DeleteDataSourceEvent(this, sourceId, source.getSourceName()));
      sourceService.invalidateCache();
      return Response.ok().build();
    } else {
      throw new NotFoundException();
    }
  }

  /**
   * Check source connection.
   *
   * This method attempts to connect to the source by calling 'select 1' on the source connection.
   * @summary Check connection
   * @param sourceKey the source key to probe
   * @return metadata for the probed source
   */
  @Path("connection/{key}")
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  @Transactional(noRollbackFor = CannotGetJdbcConnectionException.class)
  public SourceInfo checkConnection(@PathParam("key") final String sourceKey) {
    // NOTE(review): findBySourceKey may return null for unknown keys — the
    // calls below would then NPE rather than 404; confirm intended behavior.
    final Source source = sourceService.findBySourceKey(sourceKey);
    sourceService.checkConnection(source);
    return source.getSourceInfo();
  }

  /**
   * Get the first daimon (ad associated source) that has priority. In the event
   * of a tie, the first source searched wins.
   *
   * @summary Get Priority Daimons
   * @return map from daimon type to the priority source's metadata
   */
  @Path("daimon/priority")
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public Map<SourceDaimon.DaimonType, SourceInfo> getPriorityDaimons() {
    return sourceService.getPriorityDaimons()
            .entrySet().stream()
            .collect(Collectors.toMap(
                    Map.Entry::getKey,
                    e -> new SourceInfo(e.getValue())
            ));
  }

  /**
   * Set priority of daimon
   *
   * Set the priority of the specified daimon of the specified source, and set the other daimons to 0.
   * @summary Set Priority
   * @param sourceKey the source whose daimon becomes priority 1
   * @param daimonTypeName the daimon type name (must match SourceDaimon.DaimonType)
   * @return 200 OK on success; 401 when security is disabled
   */
  @Path("{sourceKey}/daimons/{daimonType}/set-priority")
  @POST
  @Produces(MediaType.APPLICATION_JSON)
  @CacheEvict(cacheNames = SourceService.CachingSetup.SOURCE_LIST_CACHE, allEntries = true)
  public Response updateSourcePriority(
          @PathParam("sourceKey") final String sourceKey,
          @PathParam("daimonType") final String daimonTypeName
  ) {
    if (!securityEnabled) {
      return getInsecureModeResponse();
    }
    // valueOf throws IllegalArgumentException for unknown daimon type names.
    SourceDaimon.DaimonType daimonType = SourceDaimon.DaimonType.valueOf(daimonTypeName);
    List<SourceDaimon> daimonList = sourceDaimonRepository.findByDaimonType(daimonType);
    daimonList.forEach(daimon -> {
      // Winner gets priority 1; every other daimon of this type is reset to 0.
      Integer newPriority = daimon.getSource().getSourceKey().equals(sourceKey) ? 1 : 0;
      daimon.setPriority(newPriority);
      sourceDaimonRepository.save(daimon);
    });
    sourceService.invalidateCache();
    return Response.ok().build();
  }

  /** 401 response used by endpoints that require security to be enabled. */
  private Response getInsecureModeResponse() {
    return Response.status(Response.Status.UNAUTHORIZED)
            .entity(SECURE_MODE_ERROR)
            .build();
  }
}
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/SourceDaimonRepository.java | src/main/java/org/ohdsi/webapi/source/SourceDaimonRepository.java | package org.ohdsi.webapi.source;
import java.util.List;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.Param;
/** Repository for {@code SourceDaimon} rows. */
public interface SourceDaimonRepository extends CrudRepository<SourceDaimon, Integer> {

    /**
     * Daimons of the given type belonging to non-deleted sources. The
     * {@code priority >= 0} clause excludes soft-deleted daimons (the entity's
     * SQLDelete sets priority to -1 instead of removing the row).
     */
    @Query("select sd from SourceDaimon sd join sd.source s where s.deletedDate is null and sd.daimonType = :daimonType and sd.priority >= 0")
    List<SourceDaimon> findByDaimonType(@Param("daimonType") SourceDaimon.DaimonType daimonType);

    /** All daimons linked to the given source (no priority filter). */
    List<SourceDaimon> findBySource(Source source);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/SourceRepository.java | src/main/java/org/ohdsi/webapi/source/SourceRepository.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.source;
import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphJpaRepository;
import org.springframework.data.jpa.repository.Query;
import java.util.List;
/**
*
* @author fdefalco
*/
public interface SourceRepository extends EntityGraphJpaRepository<Source, Integer> {

    /** Source with the given unique key; null when no match exists (callers null-check). */
    Source findBySourceKey(String sourceKey);

    /** Source with the given id; null when no match exists (callers null-check). */
    Source findBySourceId(int sourceId);

    /**
     * Sources owning a daimon of the given type, ordered by daimon priority
     * descending (highest priority first).
     * (sic: "Diamon"/"Prioirty" typos are kept — renaming would break callers.)
     */
    @Query("SELECT s FROM Source s JOIN s.daimons sd WHERE sd.daimonType = ?1 ORDER BY sd.priority DESC")
    List<Source> findAllSortedByDiamonPrioirty(SourceDaimon.DaimonType daimonType);
}
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/SourceHelper.java | src/main/java/org/ohdsi/webapi/source/SourceHelper.java | package org.ohdsi.webapi.source;
import com.odysseusinc.arachne.commons.types.DBMSType;
import com.odysseusinc.arachne.execution_engine_common.util.BigQueryUtils;
import com.odysseusinc.logging.event.ChangeDataSourceEvent;
import org.apache.commons.io.IOUtils;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
@Component
public class SourceHelper {

    // Cache of resolved connection strings per source; entries are dropped when
    // a ChangeDataSourceEvent arrives for that source.
    private ConcurrentHashMap<Source, String> sourcesConnections = new ConcurrentHashMap<>();

    /**
     * Returns the JDBC connection string for the source, resolving
     * BigQuery keyfile-based connections lazily and caching the result.
     *
     * @param source the source whose connection string is needed
     * @return the (possibly keyfile-augmented) connection string
     */
    public String getSourceConnectionString(Source source) {
        return sourcesConnections.computeIfAbsent(source, s -> {
            if (Objects.equals(DBMSType.BIGQUERY.getOhdsiDB(), s.getSourceDialect())) {
                return getBQConnectionString(s);
            } else {
                return s.getSourceConnection();
            }
        });
    }

    /**
     * For BigQuery sources whose connection string lacks an OAuth key path,
     * materializes the stored keyfile bytes to a temp file and splices its
     * absolute path into the connection string.
     *
     * @throws UncheckedIOException if the temp file cannot be written
     */
    private String getBQConnectionString(Source source) {
        String connectionString = source.getSourceConnection();
        if (BigQueryUtils.getBigQueryKeyPath(connectionString) == null) {
            byte[] keyFileData = source.getKeyfile();
            if (Objects.nonNull(keyFileData)) {
                try {
                    File keyFile = java.nio.file.Files.createTempFile("", ".json").toFile();
                    // Fix: the temp keyfile was previously never removed — register it
                    // for deletion at JVM exit so credential files do not accumulate.
                    keyFile.deleteOnExit();
                    // NOTE(review): this file holds credentials; consider owner-only
                    // file permissions — confirm deployment requirements.
                    try (OutputStream out = new FileOutputStream(keyFile)) {
                        IOUtils.write(keyFileData, out);
                    }
                    String filePath = keyFile.getAbsolutePath();
                    connectionString = BigQueryUtils.replaceBigQueryKeyPath(connectionString, filePath);
                } catch (IOException ex) {
                    throw new UncheckedIOException(ex);
                }
            }
        }
        return connectionString;
    }

    /**
     * Evicts the cached connection string when a source changes.
     * NOTE(review): relies on Source equality being keyed on sourceId (a probe
     * instance with only the id set is used for removal) — confirm in
     * Source#equals/hashCode.
     */
    @EventListener
    public void onSourceUpdate(ChangeDataSourceEvent changeDataSourceEvent) {
        Source s = new Source();
        s.setSourceId(changeDataSourceEvent.getId());
        sourcesConnections.remove(s);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/SourceDaimon.java | src/main/java/org/ohdsi/webapi/source/SourceDaimon.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.source;
import com.fasterxml.jackson.annotation.JsonIgnore;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.Where;
import java.io.Serializable;
import java.util.Objects;
/**
*
* @author fdefalco
*/
@Entity(name = "SourceDaimon")
@Table(name="source_daimon")
@SQLDelete(sql = "UPDATE {h-schema}source_daimon SET priority = -1 WHERE SOURCE_DAIMON_ID = ?")
//@Where(clause = "priority >= 0")
public class SourceDaimon implements Serializable {
// Kinds of schemas ("daimons") a Source can expose. Persisted by ordinal (see
// @Enumerated(EnumType.ORDINAL) below), so the declaration order of these
// constants must never change.
public enum DaimonType { CDM, Vocabulary, Results, CEM, CEMResults, Temp };
// No-arg constructor required by JPA.
public SourceDaimon() {
}
// Convenience constructor linking this daimon to its owning source.
public SourceDaimon(Source source) {
this.source = source;
}
@Id
@GenericGenerator(
name = "source_daimon_generator",
strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
parameters = {
@Parameter(name = "sequence_name", value = "source_daimon_sequence"),
@Parameter(name = "increment_size", value = "1")
}
)
@GeneratedValue(generator = "source_daimon_generator")
@Column(name="SOURCE_DAIMON_ID")
private int sourceDaimonId;
// Owning source; excluded from JSON to avoid a serialization cycle.
@ManyToOne
@JsonIgnore
@JoinColumn(name="SOURCE_ID", referencedColumnName="SOURCE_ID")
private Source source;
@Enumerated(EnumType.ORDINAL)
@Column(name="DAIMON_TYPE")
private DaimonType daimonType;
// Schema (table qualifier) this daimon points at within the source database.
@Column(name="TABLE_QUALIFIER")
private String tableQualifier;
// Priority among daimons of the same type. The @SQLDelete above "soft deletes"
// rows by setting priority = -1 instead of removing them.
@Column(name="PRIORITY")
private Integer priority;
public int getSourceDaimonId() {
return sourceDaimonId;
}
public void setSourceDaimonId(int sourceDaimonId) {
this.sourceDaimonId = sourceDaimonId;
}
public Source getSource() {
return source;
}
public void setSource(Source source) {
this.source = source;
}
public DaimonType getDaimonType() {
return daimonType;
}
public void setDaimonType(DaimonType daimonType) {
this.daimonType = daimonType;
}
public String getTableQualifier() {
return tableQualifier;
}
public void setTableQualifier(String tableQualifier) {
this.tableQualifier = tableQualifier;
}
public Integer getPriority() {
return priority;
}
public void setPriority(Integer priority) {
this.priority = priority;
}
// Identity is (source, daimonType) — a source holds at most one daimon per type
// in a set keyed this way; hashCode below is consistent with this definition.
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof SourceDaimon)) return false;
SourceDaimon that = (SourceDaimon) o;
return Objects.equals(getSource(), that.getSource()) &&
Objects.equals(getDaimonType(), that.getDaimonType());
}
@Override
public int hashCode() {
return Objects.hash(getSource(), getDaimonType());
}
@Override
public String toString(){
return String.format("sourceDaimonId = %d, daimonType = %s, tableQualifier = %s, priority = %d", sourceDaimonId, daimonType, tableQualifier, priority);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/CheckedEncryptedStringType.java | src/main/java/org/ohdsi/webapi/source/CheckedEncryptedStringType.java | package org.ohdsi.webapi.source;
import com.odysseusinc.datasourcemanager.encryption.EncryptorUtils;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Objects;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.jasypt.hibernate4.type.AbstractEncryptedAsStringType;
// Hibernate user type that transparently encrypts String columns on write and
// decrypts them on read, delegating the crypto to EncryptorUtils (which is
// presumably a no-op when encryption is not configured — TODO confirm).
public class CheckedEncryptedStringType extends AbstractEncryptedAsStringType {
@Override
protected Object convertToObject(String value) {
// Values are plain strings; no further conversion needed.
return value;
}
@Override
public void nullSafeSet(PreparedStatement st, Object value, int index, SharedSessionContractImplementor session) throws HibernateException, SQLException {
checkInitialization();
final String message = convertToString(value);
// Preserve SQL NULL instead of encrypting an empty/placeholder value.
if (Objects.isNull(message)) {
st.setNull(index, Types.VARCHAR);
return;
}
String encrypted = EncryptorUtils.encrypt(this.encryptor, message);
st.setString(index, encrypted);
}
@Override
public Object nullSafeGet(ResultSet rs, String[] names, SharedSessionContractImplementor session, Object owner) throws HibernateException, SQLException {
checkInitialization();
final String message = rs.getString(names[0]);
// NULL columns decrypt to null, symmetrically with nullSafeSet.
if (Objects.isNull(message)) {
return null;
}
return EncryptorUtils.decrypt(this.encryptor, message);
}
@Override
public Class returnedClass() {
return String.class;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/SourceRequest.java | src/main/java/org/ohdsi/webapi/source/SourceRequest.java | package org.ohdsi.webapi.source;
import java.util.Collection;
import javax.validation.constraints.NotNull;
// Request payload for creating/updating a data source. Mirrors the Source entity
// but carries raw credentials and Kerberos/keyfile metadata as submitted by the client.
public class SourceRequest {
@NotNull
private String name;
@NotNull
private String dialect;
// Unique business key identifying the source.
@NotNull
private String key;
@NotNull
private String connectionString;
private String username;
private String password;
// Kerberos authentication method and admin server; only relevant for dialects
// that support keytabs (see Source.DBMS_KEYTAB_SUPPORT).
private String krbAuthMethod;
private String krbAdminServer;
private Collection<SourceDaimon> daimons;
private String keyfileName;
// When true, the server verifies connectivity before persisting the source.
private Boolean checkConnection;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDialect() {
return dialect;
}
public void setDialect(String dialect) {
this.dialect = dialect;
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getConnectionString() {
return connectionString;
}
public void setConnectionString(String connectionString) {
this.connectionString = connectionString;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public Collection<SourceDaimon> getDaimons() {
return daimons;
}
public void setDaimons(Collection<SourceDaimon> daimons) {
this.daimons = daimons;
}
public String getKrbAuthMethod() {
return krbAuthMethod;
}
public void setKrbAuthMethod(String krbAuthMethod) {
this.krbAuthMethod = krbAuthMethod;
}
public String getKrbAdminServer() {
return krbAdminServer;
}
public void setKrbAdminServer(String krbAdminServer) {
this.krbAdminServer = krbAdminServer;
}
public String getKeyfileName() {
return keyfileName;
}
public void setKeyfileName(String keyfileName) {
this.keyfileName = keyfileName;
}
public Boolean isCheckConnection() {
return checkConnection;
}
public void setCheckConnection(Boolean checkConnection) {
this.checkConnection = checkConnection;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/source/Source.java | src/main/java/org/ohdsi/webapi/source/Source.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.source;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.KerberosAuthMechanism;
import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import com.google.common.collect.ImmutableList;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.Type;
import org.hibernate.annotations.Where;
import org.ohdsi.webapi.model.CommonEntity;
import org.ohdsi.webapi.source.SourceDaimon.DaimonType;
/**
*
* @author fdefalco
*/
@Entity(name = "Source")
@Table(name="source")
@SQLDelete(sql = "UPDATE {h-schema}source SET deleted_date = current_timestamp WHERE SOURCE_ID = ?")
@Where(clause = "deleted_date IS NULL")
// Soft-deleted entity: JPA deletes stamp deleted_date, and @Where hides such rows.
public class Source extends CommonEntity<Integer> implements Serializable {
// Placeholders returned to clients instead of real credentials.
public static final String MASQUERADED_USERNAME = "<username>";
public static final String MASQUERADED_PASSWORD = "<password>";
public static final String IMPALA_DATASOURCE = "impala";
public static final String BIGQUERY_DATASOURCE = "bigquery";
// Dialects for which a Kerberos keytab / key file may be attached.
public static final List<String> DBMS_KEYTAB_SUPPORT = ImmutableList.of(IMPALA_DATASOURCE, BIGQUERY_DATASOURCE);
@Id
@GenericGenerator(
name = "source_generator",
strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
parameters = {
@Parameter(name = "sequence_name", value = "source_sequence"),
@Parameter(name = "increment_size", value = "1")
}
)
@GeneratedValue(generator = "source_generator")
@Column(name="SOURCE_ID")
private int sourceId;
// Daimons are soft-deleted by setting priority = -1 (see SourceDaimon's @SQLDelete);
// this @Where filters those out.
@OneToMany(fetch= FetchType.EAGER, cascade = CascadeType.ALL, mappedBy = "source")
@Where(clause = "priority >= 0")
private Collection<SourceDaimon> daimons;
@Column(name="SOURCE_NAME")
private String sourceName;
@Column(name="SOURCE_DIALECT")
private String sourceDialect;
@Column(name="SOURCE_CONNECTION")
private String sourceConnection;
@Column(name="SOURCE_KEY")
private String sourceKey;
// Credentials are stored encrypted via the custom "encryptedString" type.
@Column
@Type(type = "encryptedString")
private String username;
@Column
@Type(type = "encryptedString")
private String password;
@Column(name = "deleted_date")
private Date deletedDate;
@Column(name = "krb_keytab")
private byte[] keyfile;
@Column(name = "keytab_name")
private String keyfileName;
@Column(name = "krb_admin_server")
private String krbAdminServer;
@Column(name = "krb_auth_method")
@Enumerated(EnumType.STRING)
private KerberosAuthMechanism krbAuthMethod;
@Column(name = "is_cache_enabled")
private boolean isCacheEnabled;
@Column(name = "check_connection")
private boolean checkConnection = true;
/**
 * Returns the table qualifier for the given daimon type.
 *
 * @throws RuntimeException when the source has no daimon of that type
 */
public String getTableQualifier(DaimonType daimonType) {
String result = getTableQualifierOrNull(daimonType);
if (result == null)
throw new RuntimeException("DaimonType (" + daimonType + ") not found in Source");
return result;
}
/**
 * Returns the table qualifier for the given daimon type, or null when absent.
 * Special case: a missing Vocabulary daimon falls back to the CDM daimon's qualifier.
 */
public String getTableQualifierOrNull(DaimonType daimonType) {
if (this.getDaimons() != null){
for (SourceDaimon sourceDaimon : this.getDaimons()) {
if (sourceDaimon.getDaimonType() == daimonType) {
return sourceDaimon.getTableQualifier();
}
}
if (DaimonType.Vocabulary.equals(daimonType)) {
return getDaimons().stream()
.filter(d -> DaimonType.CDM.equals(d.getDaimonType()))
.map(SourceDaimon::getTableQualifier)
.findFirst()
.orElse(null);
}
}
return null;
}
public String getSourceKey() {
return sourceKey;
}
public Collection<SourceDaimon> getDaimons() {
return daimons;
}
public void setDaimons(Collection<SourceDaimon> daimons) {
this.daimons = daimons;
}
public void setSourceKey(String sourceKey) {
this.sourceKey = sourceKey;
}
public int getSourceId() {
return sourceId;
}
public void setSourceId(int sourceId) {
this.sourceId = sourceId;
}
public String getSourceName() {
return sourceName;
}
public void setSourceName(String sourceName) {
this.sourceName = sourceName;
}
public String getSourceDialect() {
return sourceDialect;
}
public void setSourceDialect(String sourceDialect) {
this.sourceDialect = sourceDialect;
}
public String getSourceConnection() {
return sourceConnection;
}
public void setSourceConnection(String sourceConnection) {
this.sourceConnection = sourceConnection;
}
// Lightweight, credential-free view of this source.
public SourceInfo getSourceInfo() {
return new SourceInfo(this);
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public byte[] getKeyfile() {
return keyfile;
}
public void setKeyfile(byte[] keyfile) {
this.keyfile = keyfile;
}
public String getKeyfileName() {
return keyfileName;
}
public void setKeyfileName(String keyfileName) {
this.keyfileName = keyfileName;
}
public KerberosAuthMechanism getKrbAuthMethod() {
return krbAuthMethod;
}
public void setKrbAuthMethod(KerberosAuthMechanism krbAuthMethod) {
this.krbAuthMethod = krbAuthMethod;
}
public String getKrbAdminServer() {
return krbAdminServer;
}
public void setKrbAdminServer(String krbAdminServer) {
this.krbAdminServer = krbAdminServer;
}
// True when this source's dialect supports attaching a keytab/key file.
public boolean supportsKeyfile() {
return DBMS_KEYTAB_SUPPORT.stream().anyMatch(t -> t.equalsIgnoreCase(getSourceDialect()));
}
public boolean isIsCacheEnabled() {
return isCacheEnabled;
}
public void setIsCacheEnabled(boolean isCacheEnabled) {
this.isCacheEnabled = isCacheEnabled;
}
public boolean isCheckConnection() {
return checkConnection;
}
public void setCheckConnection(boolean checkConnection) {
this.checkConnection = checkConnection;
}
// Identity is sourceId only — relied upon e.g. by SourceHelper's cache eviction,
// which builds a stub Source with just the id.
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Source source = (Source) o;
return sourceId == source.sourceId;
}
@Override
public int hashCode() {
return Objects.hash(sourceId);
}
// Deliberately omits credentials; Kerberos details are included for Impala only.
@Override
public String toString() {
String source = "sourceId=" + sourceId +
", daimons=" + daimons +
", sourceName='" + sourceName + '\'' +
", sourceDialect='" + sourceDialect + '\'' +
", sourceKey='" + sourceKey + '\'' +
", isCacheEnabled='" + isCacheEnabled;
if (IMPALA_DATASOURCE.equalsIgnoreCase(sourceDialect)){
source += '\'' +
", krbAdminServer='" + krbAdminServer + '\'' +
", krbAuthMethod=" + krbAuthMethod;
}
return source;
}
@Override
public Integer getId() {
return sourceId;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/i18n/LocaleDTO.java | src/main/java/org/ohdsi/webapi/i18n/LocaleDTO.java | package org.ohdsi.webapi.i18n;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
// Immutable description of a UI locale: language code, display name, and whether
// it is the application's default. Serialized with "default" as the JSON property.
public class LocaleDTO {
private String code;
private String name;
@JsonProperty("default")
private boolean defaultLocale;
// Jackson constructs instances directly from JSON; there is no no-arg constructor.
@JsonCreator
public LocaleDTO(@JsonProperty("code") String code, @JsonProperty("name") String name,
@JsonProperty("default") boolean defaultLocale) {
this.code = code;
this.name = name;
this.defaultLocale = defaultLocale;
}
public String getCode() {
return code;
}
public String getName() {
return name;
}
public boolean isDefaultLocale() {
return defaultLocale;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/i18n/I18nServiceImpl.java | src/main/java/org/ohdsi/webapi/i18n/I18nServiceImpl.java | package org.ohdsi.webapi.i18n;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.ohdsi.circe.helper.ResourceHelper;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.ws.rs.InternalServerErrorException;
import java.io.IOException;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
@Component
public class I18nServiceImpl implements I18nService {

  // ObjectMapper is thread-safe once configured; reuse a single instance instead of
  // constructing a new mapper on every translate() call (it is expensive to build).
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  private List<LocaleDTO> availableLocales;

  /** Loads the list of supported locales from /i18n/locales.json at startup. */
  @PostConstruct
  public void init() throws IOException {
    String json = ResourceHelper.GetResourceAsString("/i18n/locales.json");
    JavaType type = OBJECT_MAPPER.getTypeFactory().constructCollectionType(List.class, LocaleDTO.class);
    availableLocales = OBJECT_MAPPER.readValue(json, type);
  }

  @Override
  public List<LocaleDTO> getAvailableLocales() {
    return Collections.unmodifiableList(availableLocales);
  }

  /** Translates {@code key}, falling back to the key itself when no message exists. */
  @Override
  public String translate(String key) {
    return translate(key, key);
  }

  /**
   * Resolves a dot-separated message key against the JSON bundle of the current
   * request locale.
   *
   * @param key          dot-separated path into the messages JSON, e.g. "a.b.c"
   * @param defaultValue returned when the key is absent or does not resolve to a leaf value
   * @throws InternalServerErrorException when the bundle cannot be parsed
   */
  @Override
  public String translate(String key, String defaultValue) {
    try {
      Locale locale = LocaleContextHolder.getLocale();
      String messages = getLocaleResource(locale);
      JsonNode root = OBJECT_MAPPER.readTree(messages);
      // "a.b.c" -> JSON pointer "/a/b/c"; a literal char replace suffices, no regex needed.
      String pointer = "/" + key.replace('.', '/');
      JsonNode node = root.at(pointer);
      return node.isValueNode() ? node.asText() : defaultValue;
    } catch (IOException e) {
      throw new InternalServerErrorException(e);
    }
  }

  /** Returns the raw contents of messages_&lt;lang&gt;.json, or "" when no bundle exists. */
  @Override
  public String getLocaleResource(Locale locale) {
    String resourcePath = String.format("/i18n/messages_%s.json", locale.getLanguage());
    URL resourceURL = this.getClass().getResource(resourcePath);
    String messages = "";
    if (resourceURL != null) {
      messages = ResourceHelper.GetResourceAsString(resourcePath);
    }
    return messages;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/i18n/I18nService.java | src/main/java/org/ohdsi/webapi/i18n/I18nService.java | package org.ohdsi.webapi.i18n;
import java.util.List;
import java.util.Locale;
// Localization service: exposes the configured locales and resolves message keys
// against per-locale JSON bundles.
public interface I18nService {
/** Locales the application is configured to support. */
List<LocaleDTO> getAvailableLocales();
/** Translates {@code key} for the current locale, falling back to the key itself. */
String translate(String key);
/** Translates {@code key} for the current locale, falling back to {@code defaultValue}. */
String translate(String key, String defaultValue);
/** Raw JSON message bundle for the given locale ("" when none exists). */
String getLocaleResource(Locale locale);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/i18n/LocaleFilter.java | src/main/java/org/ohdsi/webapi/i18n/LocaleFilter.java | package org.ohdsi.webapi.i18n;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.webapi.Constants;
import org.springframework.context.i18n.LocaleContextHolder;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.ext.Provider;
import java.util.Locale;
@Provider
public class LocaleFilter implements ContainerRequestFilter {

  // These values never change at runtime; they were mutable instance fields before,
  // which invited accidental reassignment — make them proper constants.
  private static final String ACCEPT_LANGUAGE_HEADER = "Accept-Language";
  private static final String LANG_PARAM = "lang";
  private static final String DEFAULT_LOCALE = "en";

  /**
   * Resolves the request locale with precedence: explicit user-language header,
   * then the "lang" query parameter, then Accept-Language, then the default ("en").
   * The result is stored as the "language" request property and in Spring's
   * LocaleContextHolder for downstream translation lookups.
   */
  @Override
  public void filter(ContainerRequestContext requestContext) {
    Locale locale = Locale.forLanguageTag(DEFAULT_LOCALE);
    String userHeader = requestContext.getHeaderString(Constants.Headers.USER_LANGAUGE);
    if (StringUtils.isNotBlank(userHeader)) {
      locale = Locale.forLanguageTag(userHeader);
    } else if (requestContext.getUriInfo().getQueryParameters().containsKey(LANG_PARAM)) {
      locale = Locale.forLanguageTag(requestContext.getUriInfo().getQueryParameters().getFirst(LANG_PARAM));
    } else if (requestContext.getHeaderString(ACCEPT_LANGUAGE_HEADER) != null) {
      locale = Locale.forLanguageTag(requestContext.getHeaderString(ACCEPT_LANGUAGE_HEADER));
    }
    requestContext.setProperty("language", locale);
    LocaleContextHolder.setLocale(locale);
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/i18n/I18nController.java | src/main/java/org/ohdsi/webapi/i18n/I18nController.java | package org.ohdsi.webapi.i18n;
import com.google.common.collect.ImmutableList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
@Path("/i18n/")
@Controller
public class I18nController {

  @Value("${i18n.enabled}")
  private boolean i18nEnabled = true;

  @Value("${i18n.defaultLocale}")
  private String defaultLocale = "en";

  @Autowired
  private I18nService i18nService;

  /**
   * Serves the message bundle for the request locale, falling back to the configured
   * default locale when i18n is disabled, no locale was resolved, or the locale is
   * not among the available ones.
   */
  @GET
  @Path("/")
  @Produces(MediaType.APPLICATION_JSON)
  public Response getResources(@Context ContainerRequestContext requestContext) {
    Locale requested = (Locale) requestContext.getProperty("language");
    boolean usable = this.i18nEnabled
            && requested != null
            && isLocaleSupported(requested.getLanguage());
    Locale effective = usable ? requested : Locale.forLanguageTag(defaultLocale);
    return Response.ok(i18nService.getLocaleResource(effective)).build();
  }

  /** True when the given language code appears among the available locales. */
  private boolean isLocaleSupported(String code) {
    for (LocaleDTO dto : i18nService.getAvailableLocales()) {
      if (Objects.equals(code, dto.getCode())) {
        return true;
      }
    }
    return false;
  }

  /**
   * Lists the locales a client may choose from.
   */
  @GET
  @Path("/locales")
  @Produces(MediaType.APPLICATION_JSON)
  public List<LocaleDTO> getAvailableLocales() {
    if (!this.i18nEnabled) {
      // if i18n is disabled, then return only default locale
      return ImmutableList.of(new LocaleDTO(this.defaultLocale, null, true));
    }
    return i18nService.getAvailableLocales();
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/ReusableController.java | src/main/java/org/ohdsi/webapi/reusable/ReusableController.java | package org.ohdsi.webapi.reusable;
import org.ohdsi.webapi.Pagination;
import org.ohdsi.webapi.reusable.dto.ReusableDTO;
import org.ohdsi.webapi.reusable.dto.ReusableVersionFullDTO;
import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO;
import org.ohdsi.webapi.versioning.dto.VersionDTO;
import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Controller;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import java.util.Collections;
import java.util.List;
@Path("/reusable")
@Controller
// REST endpoints for "Reusable" assets: CRUD, tagging, and versioning.
// All business logic is delegated to ReusableService.
public class ReusableController {
private final ReusableService reusableService;
@Autowired
public ReusableController(ReusableService reusableService) {
this.reusableService = reusableService;
}
/** Create a new Reusable from the given DTO. */
@POST
@Path("/")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public ReusableDTO create(final ReusableDTO dto) {
return reusableService.create(dto);
}
/** Page through Reusables; paging parameters are bound via @Pagination. */
@GET
@Path("/")
@Produces(MediaType.APPLICATION_JSON)
public Page<ReusableDTO> page(@Pagination Pageable pageable) {
return reusableService.page(pageable);
}
/** Update an existing Reusable. */
@PUT
@Path("/{id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public ReusableDTO update(@PathParam("id") final Integer id, final ReusableDTO dto) {
return reusableService.update(id, dto);
}
/** Create a copy of an existing Reusable (POST to /{id}). */
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/{id}")
public ReusableDTO copy(@PathParam("id") final int id) {
return reusableService.copy(id);
}
/** Fetch a single Reusable by id. */
@GET
@Path("/{id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public ReusableDTO get(@PathParam("id") final Integer id) {
return reusableService.getDTOById(id);
}
/** Check whether a Reusable with the given name exists (excluding the given id). */
@GET
@Path("/{id}/exists")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public boolean exists(@PathParam("id") @DefaultValue("0") final int id, @QueryParam("name") String name) {
return reusableService.exists(id, name);
}
/** Delete a Reusable by id. */
@DELETE
@Path("/{id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public void delete(@PathParam("id") final Integer id) {
reusableService.delete(id);
}
/**
* Assign tag to Reusable
*
* @param id
* @param tagId
*/
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/tag/")
public void assignTag(@PathParam("id") final int id, final int tagId) {
reusableService.assignTag(id, tagId);
}
/**
* Unassign tag from Reusable
*
* @param id
* @param tagId
*/
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/tag/{tagId}")
public void unassignTag(@PathParam("id") final int id, @PathParam("tagId") final int tagId) {
reusableService.unassignTag(id, tagId);
}
/**
* Assign protected tag to Reusable
*
* @param id
* @param tagId
*/
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/protectedtag/")
public void assignPermissionProtectedTag(@PathParam("id") int id, final int tagId) {
reusableService.assignTag(id, tagId);
}
/**
* Unassign protected tag from Reusable
*
* @param id
* @param tagId
*/
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/protectedtag/{tagId}")
public void unassignPermissionProtectedTag(@PathParam("id") final int id, @PathParam("tagId") final int tagId) {
reusableService.unassignTag(id, tagId);
}
/**
* Get list of versions of Reusable
*
* @param id
* @return
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/")
public List<VersionDTO> getVersions(@PathParam("id") final long id) {
return reusableService.getVersions(id);
}
/**
* Get version of Reusable
*
* @param id
* @param version
* @return
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/{version}")
public ReusableVersionFullDTO getVersion(@PathParam("id") final int id, @PathParam("version") final int version) {
return reusableService.getVersion(id, version);
}
/**
* Update version of Reusable
*
* @param id
* @param version
* @param updateDTO
* @return
*/
@PUT
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/{version}")
public VersionDTO updateVersion(@PathParam("id") final int id, @PathParam("version") final int version,
VersionUpdateDTO updateDTO) {
return reusableService.updateVersion(id, version, updateDTO);
}
/**
* Delete version of Reusable
*
* @param id
* @param version
*/
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/{version}")
public void deleteVersion(@PathParam("id") final int id, @PathParam("version") final int version) {
reusableService.deleteVersion(id, version);
}
/**
* Create a new asset form version of Reusable
*
* @param id
* @param version
* @return
*/
@PUT
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/{version}/createAsset")
public ReusableDTO copyAssetFromVersion(@PathParam("id") final int id, @PathParam("version") final int version) {
return reusableService.copyAssetFromVersion(id, version);
}
/**
* Get list of reusables with assigned tags
*
* @param requestDTO
* @return
*/
@POST
@Path("/byTags")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public List<ReusableDTO> listByTags(TagNameListRequestDTO requestDTO) {
// Empty/absent tag list short-circuits to an empty result instead of querying.
if (requestDTO == null || requestDTO.getNames() == null || requestDTO.getNames().isEmpty()) {
return Collections.emptyList();
}
return reusableService.listByTags(requestDTO);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/ReusableService.java | src/main/java/org/ohdsi/webapi/reusable/ReusableService.java | package org.ohdsi.webapi.reusable;
import org.ohdsi.webapi.reusable.domain.Reusable;
import org.ohdsi.webapi.reusable.dto.ReusableDTO;
import org.ohdsi.webapi.reusable.dto.ReusableVersionFullDTO;
import org.ohdsi.webapi.reusable.repository.ReusableRepository;
import org.ohdsi.webapi.security.PermissionService;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.tag.domain.HasTags;
import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.ohdsi.webapi.util.NameUtils;
import org.ohdsi.webapi.versioning.domain.ReusableVersion;
import org.ohdsi.webapi.versioning.domain.Version;
import org.ohdsi.webapi.versioning.domain.VersionBase;
import org.ohdsi.webapi.versioning.domain.VersionType;
import org.ohdsi.webapi.versioning.dto.VersionDTO;
import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO;
import org.ohdsi.webapi.versioning.service.VersionService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.convert.ConversionService;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.stream.Collectors;
@Service
@Transactional
public class ReusableService extends AbstractDaoService implements HasTags<Integer> {
private final ReusableRepository reusableRepository;
private final EntityManager entityManager;
private final ConversionService conversionService;
private final PermissionService permissionService;
private final VersionService<ReusableVersion> versionService;
// Constructor injection of all collaborators (repository, JPA entity manager,
// DTO conversion, permission checks, and version history service).
@Autowired
public ReusableService(
ReusableRepository reusableRepository,
EntityManager entityManager,
ConversionService conversionService,
PermissionService permissionService,
VersionService<ReusableVersion> versionService) {
this.reusableRepository = reusableRepository;
this.entityManager = entityManager;
this.conversionService = conversionService;
this.permissionService = permissionService;
this.versionService = versionService;
}
/** Creates a Reusable from a DTO and returns the persisted state as a DTO. */
public ReusableDTO create(ReusableDTO dto) {
Reusable reusable = conversionService.convert(dto, Reusable.class);
Reusable saved = create(reusable);
return conversionService.convert(saved, ReusableDTO.class);
}
/**
 * Persists a new Reusable: stamps the creator/creation date and clears any
 * modification metadata the caller may have supplied.
 */
public Reusable create(Reusable reusable) {
reusable.setCreatedBy(getCurrentUser())
;
reusable.setCreatedDate(new Date());
reusable.setModifiedBy(null);
reusable.setModifiedDate(null);
return save(reusable);
}
/** Loads the entity by id (null when absent — Spring Data findOne semantics). */
public Reusable getById(Integer id) {
return reusableRepository.findOne(id);
}
/** Loads the entity by id and converts it to a DTO. */
public ReusableDTO getDTOById(Integer id) {
Reusable reusable = reusableRepository.findOne(id);
return conversionService.convert(reusable, ReusableDTO.class);
}
/** All Reusables, unpaged. */
public List<Reusable> list() {
return reusableRepository.findAll();
}
public Page<ReusableDTO> page(final Pageable pageable) {
return reusableRepository.findAll(pageable)
.map(reusable -> {
final ReusableDTO dto = conversionService.convert(reusable, ReusableDTO.class);
permissionService.fillWriteAccess(reusable, dto);
return dto;
});
}
public ReusableDTO update(Integer id, ReusableDTO entity) {
Date currentTime = Calendar.getInstance().getTime();
saveVersion(id);
Reusable existing = reusableRepository.findOne(id);
UserEntity modifier = userRepository.findByLogin(security.getSubject());
existing.setName(entity.getName())
.setDescription(entity.getDescription())
.setData(entity.getData());
existing.setModifiedBy(modifier);
existing.setModifiedDate(currentTime);
Reusable saved = save(existing);
return conversionService.convert(saved, ReusableDTO.class);
}
public ReusableDTO copy(Integer id) {
ReusableDTO def = getDTOById(id);
def.setId(null);
def.setTags(null);
def.setName(NameUtils.getNameForCopy(def.getName(), this::getNamesLike, reusableRepository.findByName(def.getName())));
return create(def);
}
public void assignTag(Integer id, int tagId) {
Reusable entity = getById(id);
assignTag(entity, tagId);
}
public void unassignTag(Integer id, int tagId) {
Reusable entity = getById(id);
unassignTag(entity, tagId);
}
public void delete(Integer id) {
Reusable existing = reusableRepository.findOne(id);
checkOwnerOrAdminOrModerator(existing.getCreatedBy());
reusableRepository.delete(id);
}
public List<VersionDTO> getVersions(long id) {
List<VersionBase> versions = versionService.getVersions(VersionType.REUSABLE, id);
return versions.stream()
.map(v -> conversionService.convert(v, VersionDTO.class))
.collect(Collectors.toList());
}
public ReusableVersionFullDTO getVersion(int id, int version) {
checkVersion(id, version, false);
ReusableVersion reusableVersion = versionService.getById(VersionType.REUSABLE, id, version);
return conversionService.convert(reusableVersion, ReusableVersionFullDTO.class);
}
public VersionDTO updateVersion(int id, int version, VersionUpdateDTO updateDTO) {
checkVersion(id, version);
updateDTO.setAssetId(id);
updateDTO.setVersion(version);
ReusableVersion updated = versionService.update(VersionType.REUSABLE, updateDTO);
return conversionService.convert(updated, VersionDTO.class);
}
public void deleteVersion(int id, int version) {
checkVersion(id, version);
versionService.delete(VersionType.REUSABLE, id, version);
}
public ReusableDTO copyAssetFromVersion(int id, int version) {
checkVersion(id, version, false);
ReusableVersion reusableVersion = versionService.getById(VersionType.REUSABLE, id, version);
ReusableVersionFullDTO fullDTO = conversionService.convert(reusableVersion, ReusableVersionFullDTO.class);
ReusableDTO dto = conversionService.convert(fullDTO.getEntityDTO(), ReusableDTO.class);
dto.setId(null);
dto.setTags(null);
dto.setName(NameUtils.getNameForCopy(dto.getName(), this::getNamesLike,
reusableRepository.findByName(dto.getName())));
return create(dto);
}
public List<ReusableDTO> listByTags(TagNameListRequestDTO requestDTO) {
List<String> names = requestDTO.getNames().stream()
.map(name -> name.toLowerCase(Locale.ROOT))
.collect(Collectors.toList());
List<Reusable> entities = reusableRepository.findByTags(names);
return listByTags(entities, names, ReusableDTO.class);
}
private void checkVersion(int id, int version) {
checkVersion(id, version, true);
}
private void checkVersion(int id, int version, boolean checkOwnerShip) {
Version reusableVersion = versionService.getById(VersionType.REUSABLE, id, version);
ExceptionUtils.throwNotFoundExceptionIfNull(reusableVersion,
String.format("There is no reusable version with id = %d.", version));
Reusable entity = this.reusableRepository.findOne(id);
if (checkOwnerShip) {
checkOwnerOrAdminOrGranted(entity);
}
}
public ReusableVersion saveVersion(int id) {
Reusable def = this.reusableRepository.findOne(id);
ReusableVersion version = conversionService.convert(def, ReusableVersion.class);
UserEntity user = Objects.nonNull(def.getModifiedBy()) ? def.getModifiedBy() : def.getCreatedBy();
Date versionDate = Objects.nonNull(def.getModifiedDate()) ? def.getModifiedDate() : def.getCreatedDate();
version.setCreatedBy(user);
version.setCreatedDate(versionDate);
return versionService.create(VersionType.REUSABLE, version);
}
private Reusable save(Reusable reusable) {
reusable = reusableRepository.saveAndFlush(reusable);
entityManager.refresh(reusable);
return reusableRepository.findOne(reusable.getId());
}
public boolean exists(final int id, final String name) {
return reusableRepository.existsCount(id, name) > 0;
}
public List<String> getNamesLike(String copyName) {
return reusableRepository.findAllByNameStartsWith(copyName).stream().map(Reusable::getName).collect(Collectors.toList());
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/dto/ReusableDTO.java | src/main/java/org/ohdsi/webapi/reusable/dto/ReusableDTO.java | package org.ohdsi.webapi.reusable.dto;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.ohdsi.webapi.service.dto.CommonEntityExtDTO;
import java.util.Objects;
/**
 * Transport object for a Reusable asset.
 * Unknown JSON properties are ignored on input; null fields are omitted on output.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ReusableDTO extends CommonEntityExtDTO {
    // was package-private; made private for consistency with the other fields
    // (Jackson serializes via the @JsonProperty annotation regardless of visibility)
    @JsonProperty
    private Integer id;
    @JsonProperty
    private String name;
    @JsonProperty
    private String description;
    /** Serialized asset payload (JSON string). */
    @JsonProperty
    private String data;

    @Override
    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getData() {
        return data;
    }

    public void setData(String data) {
        this.data = data;
    }

    // NOTE(review): equality deliberately(?) covers only the fields declared here,
    // not anything inherited from CommonEntityExtDTO — confirm that is intended.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ReusableDTO that = (ReusableDTO) o;
        return Objects.equals(id, that.id) && Objects.equals(name, that.name) && Objects.equals(description, that.description) && Objects.equals(data, that.data);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, name, description, data);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/dto/ReusableVersionFullDTO.java | src/main/java/org/ohdsi/webapi/reusable/dto/ReusableVersionFullDTO.java | package org.ohdsi.webapi.reusable.dto;
import org.ohdsi.webapi.versioning.dto.VersionFullDTO;
/**
 * Marker specialization of {@code VersionFullDTO} that pairs version metadata
 * with a {@link ReusableDTO} payload; adds no members of its own.
 */
public class ReusableVersionFullDTO extends VersionFullDTO<ReusableDTO> {
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/domain/Reusable.java | src/main/java/org/ohdsi/webapi/reusable/domain/Reusable.java | package org.ohdsi.webapi.reusable.domain;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import org.ohdsi.webapi.model.CommonEntityExt;
import org.ohdsi.webapi.tag.domain.Tag;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.Table;
import java.util.Set;
/**
 * JPA entity for a Reusable asset, mapped to table {@code reusable}.
 * Audit fields and common behavior come from {@code CommonEntityExt}.
 * Setters for name/description/data are fluent (return {@code this}); {@code setId} is not.
 */
@Entity(name = "Reusable")
@Table(name = "reusable")
public class Reusable extends CommonEntityExt<Integer> {
    // sequence-backed id: reusable_seq, increment 1
    @Id
    @GenericGenerator(
            name = "reusable_generator",
            strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
            parameters = {
                    @Parameter(name = "sequence_name", value = "reusable_seq"),
                    @Parameter(name = "increment_size", value = "1")
            }
    )
    @GeneratedValue(generator = "reusable_generator")
    private Integer id;

    @Column(name = "name")
    private String name;

    @Column(name = "description")
    private String description;

    // serialized asset payload (presumably a JSON document) — TODO confirm format
    @Column(name = "data")
    private String data;

    // lazy many-to-many to tags through join table reusable_tag(asset_id, tag_id)
    @ManyToMany(targetEntity = Tag.class, fetch = FetchType.LAZY)
    @JoinTable(name = "reusable_tag",
            joinColumns = @JoinColumn(name = "asset_id", referencedColumnName = "id"),
            inverseJoinColumns = @JoinColumn(name = "tag_id", referencedColumnName = "id"))
    private Set<Tag> tags;

    @Override
    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public Reusable setName(String name) {
        this.name = name;
        return this;
    }

    public String getDescription() {
        return description;
    }

    public Reusable setDescription(String description) {
        this.description = description;
        return this;
    }

    public String getData() {
        return data;
    }

    public Reusable setData(String data) {
        this.data = data;
        return this;
    }

    @Override
    public Set<Tag> getTags() {
        return tags;
    }

    @Override
    public void setTags(Set<Tag> tags) {
        this.tags = tags;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/repository/ReusableRepository.java | src/main/java/org/ohdsi/webapi/reusable/repository/ReusableRepository.java | package org.ohdsi.webapi.reusable.repository;
import org.ohdsi.webapi.reusable.domain.Reusable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
import java.util.Optional;
/** Spring Data repository for {@link Reusable} entities. */
public interface ReusableRepository extends JpaRepository<Reusable, Integer> {

    /** Finds a reusable by exact name. */
    Optional<Reusable> findByName(String name);

    // Caller supplies the full LIKE pattern (e.g. "name%"); '\' escapes wildcards in it.
    @Query("SELECT r FROM Reusable r WHERE r.name LIKE ?1 ESCAPE '\\'")
    List<Reusable> findAllByNameStartsWith(String pattern);

    /** Counts OTHER reusables (id differs) with the given name — used for uniqueness checks. */
    @Query("SELECT COUNT(r) FROM Reusable r WHERE r.name = :name and r.id <> :id")
    int existsCount(@Param("id") Integer id, @Param("name") String name);

    // tagNames are expected already lower-cased by the caller; tags are fetched eagerly here
    @Query("SELECT DISTINCT r FROM Reusable r JOIN FETCH r.tags t WHERE lower(t.name) in :tagNames")
    List<Reusable> findByTags(@Param("tagNames") List<String> tagNames);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/converter/ReusableVersionToReusableVersionFullDTOConverter.java | src/main/java/org/ohdsi/webapi/reusable/converter/ReusableVersionToReusableVersionFullDTOConverter.java | package org.ohdsi.webapi.reusable.converter;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
import org.ohdsi.webapi.reusable.domain.Reusable;
import org.ohdsi.webapi.reusable.dto.ReusableDTO;
import org.ohdsi.webapi.reusable.dto.ReusableVersionFullDTO;
import org.ohdsi.webapi.reusable.repository.ReusableRepository;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.ohdsi.webapi.versioning.domain.ReusableVersion;
import org.ohdsi.webapi.versioning.dto.VersionDTO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
 * Converts a stored {@code ReusableVersion} into a full version DTO: version metadata
 * plus an entity DTO whose description/data come from the version while identity,
 * tags and audit fields come from the asset's current state.
 */
@Component
public class ReusableVersionToReusableVersionFullDTOConverter
        extends BaseConversionServiceAwareConverter<ReusableVersion, ReusableVersionFullDTO> {

    @Autowired
    private ReusableRepository repository;

    @Override
    public ReusableVersionFullDTO convert(ReusableVersion source) {
        final Reusable current = this.repository.findOne(source.getAssetId().intValue());
        ExceptionUtils.throwNotFoundExceptionIfNull(current,
                String.format("There is no reusable with id = %d.", source.getAssetId()));

        final ReusableVersionFullDTO result = new ReusableVersionFullDTO();
        result.setVersionDTO(conversionService.convert(source, VersionDTO.class));
        result.setEntityDTO(conversionService.convert(mergeSnapshot(current, source), ReusableDTO.class));
        return result;
    }

    /** Builds a transient entity: live identity/tags/audit fields + versioned payload. */
    private Reusable mergeSnapshot(Reusable current, ReusableVersion source) {
        final Reusable merged = new Reusable();
        merged.setId(current.getId());
        merged.setTags(current.getTags());
        merged.setName(current.getName());
        merged.setCreatedBy(current.getCreatedBy());
        merged.setCreatedDate(current.getCreatedDate());
        merged.setModifiedBy(current.getModifiedBy());
        merged.setModifiedDate(current.getModifiedDate());
        merged.setDescription(source.getDescription());
        merged.setData(source.getAssetJson());
        return merged;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/converter/ReusableDTOToReusableConverter.java | src/main/java/org/ohdsi/webapi/reusable/converter/ReusableDTOToReusableConverter.java | package org.ohdsi.webapi.reusable.converter;
import org.ohdsi.webapi.reusable.domain.Reusable;
import org.ohdsi.webapi.reusable.dto.ReusableDTO;
import org.ohdsi.webapi.service.converters.BaseCommonDTOExtToEntityExtConverter;
import org.springframework.stereotype.Component;
/** Maps an incoming {@link ReusableDTO} onto a {@link Reusable} entity. */
@Component
public class ReusableDTOToReusableConverter extends BaseCommonDTOExtToEntityExtConverter<ReusableDTO, Reusable> {

    // NOTE(review): presumably overrides a factory method on the base converter —
    // @Override omitted in the original; confirm against the base class.
    protected Reusable createResultObject() {
        return new Reusable();
    }

    @Override
    protected void doConvert(ReusableDTO source, Reusable target) {
        // straight field copy; audit/tag fields are handled by the base converter (presumably)
        target.setId(source.getId());
        target.setName(source.getName());
        target.setDescription(source.getDescription());
        target.setData(source.getData());
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/converter/ReusableToReusableVersionConverter.java | src/main/java/org/ohdsi/webapi/reusable/converter/ReusableToReusableVersionConverter.java | package org.ohdsi.webapi.reusable.converter;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
import org.ohdsi.webapi.reusable.domain.Reusable;
import org.ohdsi.webapi.versioning.domain.ReusableVersion;
import org.springframework.stereotype.Component;
/**
 * Builds a {@code ReusableVersion} snapshot from the current state of a
 * {@link Reusable}: asset id, description and the serialized payload.
 * Version author/date are filled in later by the caller.
 */
@Component
public class ReusableToReusableVersionConverter
        extends BaseConversionServiceAwareConverter<Reusable, ReusableVersion> {

    @Override
    public ReusableVersion convert(Reusable source) {
        final ReusableVersion snapshot = new ReusableVersion();
        snapshot.setAssetId(source.getId());
        snapshot.setDescription(source.getDescription());
        snapshot.setAssetJson(source.getData());
        return snapshot;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/reusable/converter/ReusableToReusableDTOConverter.java | src/main/java/org/ohdsi/webapi/reusable/converter/ReusableToReusableDTOConverter.java | package org.ohdsi.webapi.reusable.converter;
import org.ohdsi.webapi.reusable.domain.Reusable;
import org.ohdsi.webapi.reusable.dto.ReusableDTO;
import org.ohdsi.webapi.service.converters.BaseCommonEntityExtToDTOExtConverter;
import org.springframework.stereotype.Component;
@Component
public class ReusableToReusableDTOConverter extends BaseCommonEntityExtToDTOExtConverter<Reusable, ReusableDTO> {
@Override
protected void doConvert(Reusable source, ReusableDTO target) {
target.setId(source.getId());
target.setName(source.getName());
target.setDescription(source.getDescription());
target.setData(source.getData());
}
protected ReusableDTO createResultObject() {
return new ReusableDTO();
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/controller/ScriptExecutionCallbackController.java | src/main/java/org/ohdsi/webapi/executionengine/controller/ScriptExecutionCallbackController.java | package org.ohdsi.webapi.executionengine.controller;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.AnalysisExecutionStatusDTO;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.AnalysisResultDTO;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.AnalysisResultStatusDTO;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.glassfish.jersey.media.multipart.BodyPartEntity;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.hibernate.Hibernate;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFileContent;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFileContentList;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineGenerationEntity;
import org.ohdsi.webapi.executionengine.exception.ScriptCallbackException;
import org.ohdsi.webapi.executionengine.repository.AnalysisExecutionRepository;
import org.ohdsi.webapi.executionengine.repository.AnalysisResultFileContentRepository;
import org.ohdsi.webapi.executionengine.repository.ExecutionEngineGenerationRepository;
import org.ohdsi.webapi.executionengine.service.AnalysisResultFileContentSensitiveInfoService;
import org.ohdsi.webapi.executionengine.service.AnalysisZipRepackService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.MediaType;
import java.io.IOException;
import java.util.*;
import static org.ohdsi.webapi.Constants.Variables.SOURCE;
import static org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus.Status.RUNNING;
import static org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus.Status.STARTED;
/**
* REST Services related to working with Arachne Execution Engine
* Callbacks
*
* @summary Arachne Execution Engine Callbacks
*/
/**
 * REST Services related to working with Arachne Execution Engine
 * Callbacks
 *
 * @summary Arachne Execution Engine Callbacks
 */
@Controller
@Path("/executionservice/callbacks")
public class ScriptExecutionCallbackController {

    private static final Logger log = LoggerFactory.getLogger(ScriptExecutionCallbackController.class);

    private static final String EXECUTION_NOT_FOUND = "Analysis execution with id {%d} not found";

    private final ExecutionEngineGenerationRepository executionEngineGenerationRepository;
    private final AnalysisExecutionRepository analysisExecutionRepository;
    private final AnalysisResultFileContentRepository analysisResultFileContentRepository;
    private final AnalysisResultFileContentSensitiveInfoService sensitiveInfoService;
    private final AnalysisZipRepackService analysisZipRepackService;

    // max size (MB) of each zip volume when repacking result archives
    @Value("${analysis.result.zipVolumeSizeMb}")
    private int zipVolumeSizeMb;

    @Autowired
    public ScriptExecutionCallbackController(ExecutionEngineGenerationRepository executionEngineGenerationRepository,
                                             AnalysisExecutionRepository analysisExecutionRepository,
                                             AnalysisResultFileContentRepository analysisResultFileContentRepository,
                                             AnalysisResultFileContentSensitiveInfoService sensitiveInfoService,
                                             AnalysisZipRepackService analysisZipRepackService) {
        this.executionEngineGenerationRepository = executionEngineGenerationRepository;
        this.analysisExecutionRepository = analysisExecutionRepository;
        this.analysisResultFileContentRepository = analysisResultFileContentRepository;
        this.sensitiveInfoService = sensitiveInfoService;
        this.analysisZipRepackService = analysisZipRepackService;
    }

    /**
     * Update an execution submission
     *
     * @summary Update execution submission
     * @param id The execution ID
     * @param password The password
     * @param status The updated status
     */
    @Path(value = "submission/{id}/status/update/{password}")
    @Consumes(MediaType.APPLICATION_JSON)
    @POST
    @Transactional
    public void statusUpdate(@PathParam("id") Long id,
                             @PathParam("password") String password,
                             AnalysisExecutionStatusDTO status) {
        log.info("Accepted an updateSubmission request. ID:{}, Update date:{} Log: {}",
                status.getId(), status.getStdoutDate(), status.getStdout());
        ExecutionEngineGenerationEntity executionEngineGeneration = executionEngineGenerationRepository.findById(id)
                .orElseThrow(() -> new ScriptCallbackException(String.format(EXECUTION_NOT_FOUND, id)));
        ExecutionEngineAnalysisStatus analysisExecution = executionEngineGeneration.getAnalysisExecution();
        // force-load the lazily fetched association before reading it
        Hibernate.initialize(analysisExecution);
        // only move STARTED/RUNNING -> RUNNING, and only when the callback password matches;
        // mismatches and terminal states are silently ignored here
        if (Objects.equals(password, analysisExecution.getExecutionEngineGeneration().getUpdatePassword())
                && ( analysisExecution.getExecutionStatus().equals(STARTED)
                || analysisExecution.getExecutionStatus().equals(RUNNING))
        ) {
            analysisExecution.setExecutionStatus(RUNNING);
            analysisExecutionRepository.saveAndFlush(analysisExecution);
        }
    }

    /**
     * Update the execution multipart information
     *
     * @summary Update execution information
     * @param id The execution ID
     * @param password The password
     * @param multiPart The multipart form data
     */
    @Path(value = "submission/{id}/result/{password}")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @POST
    @Transactional
    public void analysisResult(@PathParam("id") Long id,
                               @PathParam("password") String password,
                               FormDataMultiPart multiPart) {
        log.info("Accepted an analysisResult request. ID:{}", id);
        ExecutionEngineGenerationEntity executionEngineGeneration = executionEngineGenerationRepository.findById(id)
                .orElseThrow(() -> new ScriptCallbackException(String.format(EXECUTION_NOT_FOUND, id)));
        ExecutionEngineAnalysisStatus analysisExecution = executionEngineGeneration.getAnalysisExecution();
        if (Objects.equals(password, analysisExecution.getExecutionEngineGeneration().getUpdatePassword())) {
            // the "analysisResult" form field carries the engine's JSON result descriptor
            AnalysisResultDTO analysisResultDTO =
                    multiPart.getField("analysisResult").getValueAs(AnalysisResultDTO.class);
            AnalysisResultStatusDTO status = analysisResultDTO.getStatus();
            if (status == AnalysisResultStatusDTO.EXECUTED) {
                analysisExecution.setExecutionStatus(ExecutionEngineAnalysisStatus.Status.COMPLETED);
            } else if (status == AnalysisResultStatusDTO.FAILED) {
                analysisExecution.setExecutionStatus(ExecutionEngineAnalysisStatus.Status.FAILED);
            }
            // status is persisted even if the subsequent file processing fails (best-effort below)
            analysisExecutionRepository.saveAndFlush(analysisExecution);
            try {
                processAndSaveAnalysisResultFiles(multiPart, analysisExecution, analysisResultDTO);
            }catch (Exception e){
                log.warn("Failed to save files for execution ID:{}", id, e);
            }
        } else {
            log.error("Update password not matched for execution ID:{}", id);
        }
    }

    /**
     * Stores the result files attached to the callback: de-duplicates file names,
     * appends the engine stdout as "stdout.txt", filters sensitive information and
     * repacks archives into fixed-size zip volumes before saving.
     */
    private void processAndSaveAnalysisResultFiles(
            FormDataMultiPart multiPart,
            ExecutionEngineAnalysisStatus analysisExecution,
            AnalysisResultDTO analysisResultDTO) {
        Map<String, Object> variables = Collections.singletonMap(SOURCE, analysisExecution.getExecutionEngineGeneration().getSource());
        List<FormDataBodyPart> bodyParts = multiPart.getFields("file");
        AnalysisResultFileContentList contentList = new AnalysisResultFileContentList();
        if (bodyParts != null) {
            // duplicate file names get a " (n)" suffix, keeping the original extension
            Map<String,Integer> duplicates = new HashMap<>();
            for (FormDataBodyPart bodyPart : bodyParts) {
                BodyPartEntity bodyPartEntity =
                        (BodyPartEntity) bodyPart.getEntity();
                String fileName = bodyPart.getContentDisposition().getFileName();
                String extension = FilenameUtils.getExtension(fileName);
                int count = duplicates.getOrDefault(fileName, 0) + 1;
                duplicates.put(fileName, count);
                if (count > 1) {
                    fileName = FilenameUtils.getBaseName(fileName) + " (" + count + ")." + extension;
                }
                try {
                    byte[] contents = IOUtils.toByteArray(bodyPartEntity.getInputStream());
                    AnalysisResultFileContent resultFileContent = new AnalysisResultFileContent(analysisExecution, fileName,
                            bodyPart.getMediaType().getType(), contents);
                    contentList.getFiles().add(resultFileContent);
                } catch (IOException e) {
                    // NOTE(review): the IOException cause is dropped here — consider passing `e`
                    // to the exception if ScriptCallbackException supports a cause; confirm.
                    throw new ScriptCallbackException("Unable to read result " + "files");
                }
            }
        }
        AnalysisResultFileContent resultFileContent = new AnalysisResultFileContent(analysisExecution, "stdout.txt", MediaType.TEXT_PLAIN,
                analysisResultDTO.getStdout().getBytes());
        contentList.getFiles().add(resultFileContent);
        // We have to filter all files for current execution because of possibility of archives split into volumes
        // Volumes will be removed during decompressing and compressing
        contentList = sensitiveInfoService.filterSensitiveInfo(contentList, variables);
        List<AnalysisResultFileContent> analysisRepackResult = analysisZipRepackService.process(contentList.getFiles(), zipVolumeSizeMb);
        analysisResultFileContentRepository.save(analysisRepackResult);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/controller/ScriptExecutionController.java | src/main/java/org/ohdsi/webapi/executionengine/controller/ScriptExecutionController.java | package org.ohdsi.webapi.executionengine.controller;
import org.ohdsi.webapi.executionengine.repository.AnalysisExecutionRepository;
import org.ohdsi.webapi.executionengine.service.ExecutionEngineStatus;
import org.ohdsi.webapi.executionengine.service.ExecutionEngineStatusService;
import org.ohdsi.webapi.executionengine.service.ScriptExecutionService;
import org.ohdsi.webapi.job.GeneratesNotification;
import org.ohdsi.webapi.job.JobTemplate;
import org.ohdsi.webapi.source.SourceRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import javax.persistence.EntityManager;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
/**
* REST Services related to working with Arachne Execution Engine
* Services
*
* @summary Arachne Execution Engine
*/
/**
 * REST Services related to working with Arachne Execution Engine
 * Services
 *
 * @summary Arachne Execution Engine
 */
@Component
@Path("/executionservice")
public class ScriptExecutionController implements GeneratesNotification {

    public static final String SCRIPT_TYPE = "scriptType";
    private static final String FOLDING_KEY = "foldingKey";
    // job name reported to the notification subsystem (see getJobName)
    private static final String NAME = "executionEngine";
    private static final String COHORT_ID = "cohortId";

    private final Logger logger = LoggerFactory.getLogger(ScriptExecutionController.class);

    // callback URL templates the execution engine posts results / status updates to
    @Value("${executionengine.resultCallback}")
    private String resultCallback;
    @Value("${executionengine.updateStatusCallback}")
    private String updateStatusCallback;

    private final ScriptExecutionService scriptExecutionService;
    private final StepBuilderFactory stepBuilderFactory;
    private final JobBuilderFactory jobBuilders;
    private final JobTemplate jobTemplate;
    private final AnalysisExecutionRepository analysisExecutionRepository;
    private final EntityManager entityManager;
    private final ExecutionEngineStatusService executionEngineStatusService;
    // NOTE(review): not final unlike the other constructor-injected fields — could be final
    private SourceRepository sourceRepository;

    @Autowired
    public ScriptExecutionController(final ScriptExecutionService scriptExecutionService,
                                     final StepBuilderFactory stepBuilderFactory,
                                     final JobBuilderFactory jobBuilders,
                                     final JobTemplate jobTemplate,
                                     final AnalysisExecutionRepository analysisExecutionRepository,
                                     final EntityManager entityManager,
                                     final ExecutionEngineStatusService executionEngineStatusService,
                                     final SourceRepository sourceRepository) {
        this.scriptExecutionService = scriptExecutionService;
        this.stepBuilderFactory = stepBuilderFactory;
        this.jobBuilders = jobBuilders;
        this.jobTemplate = jobTemplate;
        this.analysisExecutionRepository = analysisExecutionRepository;
        this.entityManager = entityManager;
        this.executionEngineStatusService = executionEngineStatusService;
        this.sourceRepository = sourceRepository;
    }

    /**
     * Get the execution status by execution ID
     *
     * @summary Get an execution status by ID
     * @param executionId The execution ID
     * @return The status
     */
    @Path("execution/status/{executionId}")
    @GET
    public String getStatus(@PathParam("executionId") Long executionId) {
        return scriptExecutionService.getExecutionStatus(executionId);
    }

    /**
     * Get the execution status of the Arachne Execution Engine
     *
     * @summary Get Arachne Execution Engine status
     * @return The StatusResponse
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("status")
    public StatusResponse getExecutionEngineStatus(){
        return new StatusResponse(executionEngineStatusService.getExecutionEngineStatus());
    }

    @Override
    public String getJobName() {
        return NAME;
    }

    @Override
    public String getExecutionFoldingKey() {
        return FOLDING_KEY;
    }

    /** Simple JSON envelope for the engine status; public field is serialized directly. */
    private class StatusResponse {
        public StatusResponse(final ExecutionEngineStatus status) {
            this.status = status;
        }
        public ExecutionEngineStatus status;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/dto/ExecutionRequestDTO.java | src/main/java/org/ohdsi/webapi/executionengine/dto/ExecutionRequestDTO.java | package org.ohdsi.webapi.executionengine.dto;
/**
 * Request payload for launching a script execution. Public fields are the
 * (de)serialization contract — do not rename.
 */
public class ExecutionRequestDTO {
    // key of the CDM source to run against
    public String sourceKey;
    // script template to execute — presumably R/SQL; TODO confirm expected content
    public String template;
    public String exposureTable;
    public String outcomeTable;
    public Integer cdmVersion;
    // working directory for the execution
    public String workFolder;
    public Integer cohortId;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/util/StringGenerationUtil.java | src/main/java/org/ohdsi/webapi/executionengine/util/StringGenerationUtil.java | package org.ohdsi.webapi.executionengine.util;
import java.util.UUID;
/**
 * Static helpers for generating random identifiers used as file names.
 */
public final class StringGenerationUtil {

    private StringGenerationUtil() {
        // static utility class — no instances
    }

    /**
     * Generates a random file name with the given extension.
     *
     * @param extension file extension without the leading dot (e.g. {@code "zip"})
     * @return a random 32-character base name joined to the extension by a dot
     */
    public static String generateFileName(String extension) {
        return generateRandomString() + "." + extension;
    }

    /**
     * @return a random 32-character string: a {@link UUID} with the dashes removed
     */
    public static String generateRandomString() {
        return UUID.randomUUID().toString().replace("-", "");
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/ScriptExecutionService.java | src/main/java/org/ohdsi/webapi/executionengine/service/ScriptExecutionService.java | package org.ohdsi.webapi.executionengine.service;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import org.ohdsi.webapi.executionengine.entity.AnalysisFile;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus;
import org.ohdsi.webapi.source.Source;
/** Contract for submitting scripts to the Arachne Execution Engine and tracking their results. */
public interface ScriptExecutionService {

    /**
     * Submits the given files for execution against a source.
     *
     * @param executionId        id of the pre-created execution record
     * @param source             CDM source to run against
     * @param files              analysis files to ship to the engine
     * @param updatePassword     password the engine must present on status/result callbacks
     * @param executableFilename entry-point file within {@code files}
     * @param targetTable        target results table — TODO confirm exact semantics
     */
    void runScript(Long executionId, Source source, List<AnalysisFile> files, String updatePassword,
                   String executableFilename, String targetTable) throws Exception;

    /** Resolves a source by its source key. */
    Source findSourceByKey(String key);

    /** Creates the execution tracking record for a job before the script is submitted. */
    ExecutionEngineAnalysisStatus createAnalysisExecution(Long jobId, Source source, String password, List<AnalysisFile> analysisFiles);

    /** @return the current status of the execution (string form). */
    String getExecutionStatus(Long executionId);

    /** @return the execution's result files packaged as a single file (presumably a zip — confirm). */
    File getExecutionResult(Long executionId) throws IOException;

    /** Marks executions older than the given date as invalid/stale. */
    void invalidateExecutions(Date invalidateDate);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/AnalysisResultFileContentSensitiveInfoServiceImpl.java | src/main/java/org/ohdsi/webapi/executionengine/service/AnalysisResultFileContentSensitiveInfoServiceImpl.java | package org.ohdsi.webapi.executionengine.service;
import com.odysseusinc.arachne.execution_engine_common.util.CommonFileUtils;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.ohdsi.webapi.common.sensitiveinfo.AbstractSensitiveInfoService;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFile;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFileContent;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFileContentList;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.google.common.io.Files.createTempDir;
@Service
public class AnalysisResultFileContentSensitiveInfoServiceImpl extends AbstractSensitiveInfoService implements AnalysisResultFileContentSensitiveInfoService {

    // Wildcard entry in the configured extension list: filter every file regardless of extension.
    private static final String EXTENSION_ALL = "*";
    // Entry in the configured extension list that matches files with no extension at all.
    private static final String EXTENSION_EMPTY = "-";

    // Normalized set of extensions whose contents must pass through sensitive-info filtering.
    private Set<String> sensitiveExtensions;

    // Raw extension list from configuration, e.g. "csv, txt" or "*".
    @Value("${sensitiveinfo.analysis.extensions}")
    private String[] sensitiveAnalysisExtensions;

    /**
     * Builds {@link #sensitiveExtensions} from configuration. A single "*" entry
     * overrides all other values and means "filter everything".
     */
    @PostConstruct
    public void init() {
        super.init();
        sensitiveExtensions = new HashSet<>();
        if (sensitiveAnalysisExtensions != null && sensitiveAnalysisExtensions.length > 0) {
            // If there is "*" symbol - ignore other values
            for (String value : sensitiveAnalysisExtensions) {
                if (EXTENSION_ALL.equals(value)) {
                    sensitiveExtensions.clear();
                    sensitiveExtensions.add(EXTENSION_ALL);
                    break;
                } else {
                    sensitiveExtensions.add(value.trim());
                }
            }
        }
    }

    /**
     * Filters sensitive information from every file in {@code source}, in place.
     * Files are first written to a temp directory so multivolume zip archives can be
     * reassembled and processed as a whole.
     * <p>
     * IMPORTANT: All volumes of multivolume archives will be merged into one volume;
     * the per-volume entries are removed from the result list.
     * On any error the list is returned unchanged with {@code hasErrors} set.
     */
    @Override
    public AnalysisResultFileContentList filterSensitiveInfo(AnalysisResultFileContentList source, Map<String, Object> variables, boolean isAdmin) {
        File temporaryDir = createTempDir();
        try {
            // Save all files to be able to process multivolume archives
            Map<AnalysisResultFileContent, Path> paths = saveFiles(temporaryDir, source.getFiles());
            paths.forEach((file, path) -> {
                // Archive volumes will be processed as entire archive
                if (!AnalysisZipUtils.isArchiveVolume(path)) {
                    processFile(path, variables);
                }
            });
            for (Iterator<Map.Entry<AnalysisResultFileContent, Path>> iter = paths.entrySet().iterator(); iter.hasNext();) {
                Map.Entry<AnalysisResultFileContent, Path> entry = iter.next();
                AnalysisResultFileContent fileContent = entry.getKey();
                Path path = entry.getValue();
                // If file does not exist then it was a part of multivolume archive and was deleted
                if (path.toFile().exists()) {
                    byte[] content = Files.readAllBytes(path);
                    fileContent.setContents(content);
                } else {
                    // Path contains information about archive volume, must be deleted
                    // because we create new archive without volumes
                    iter.remove();
                }
            }
            // Drop entries for volumes that were merged away.
            source.getFiles().retainAll(paths.keySet());
        } catch (Exception e) {
            LOGGER.error("Files filtering error", e);
            source.setHasErrors(true);
        } finally {
            FileUtils.deleteQuietly(temporaryDir);
        }
        return source;
    }

    /**
     * Always treats the caller as non-admin for filtering purposes.
     * NOTE(review): presumably admin visibility is decided elsewhere — confirm intended.
     */
    @Override
    public boolean isAdmin() {
        return false;
    }

    /**
     * Writes every result file into {@code tempDir} and returns the mapping from
     * entity to the written path. Fails fast on the first write error.
     */
    private Map<AnalysisResultFileContent, Path> saveFiles(File tempDir, List<AnalysisResultFileContent> files) throws Exception {
        // java.util.HashMap instead of raw commons-collections HashedMap: typed, no extra dependency.
        Map<AnalysisResultFileContent, Path> paths = new HashMap<>();
        for (AnalysisResultFileContent file : files) {
            try {
                AnalysisResultFile analysisResultFile = file.getAnalysisResultFile();
                Path path = new File(tempDir, analysisResultFile.getFileName()).toPath();
                paths.put(file, path);
                Files.write(path, file.getContents(), StandardOpenOption.CREATE_NEW);
            } catch (Exception e) {
                LOGGER.error("File writing error for file with id: {}", file.getAnalysisResultFile().getId(), e);
                throw e;
            }
        }
        return paths;
    }

    /**
     * Rewrites the file in place with sensitive values filtered, but only when its
     * extension is configured as sensitive.
     */
    private Path doFilterSensitiveInfo(Path path, Map<String, Object> variables) throws IOException {
        if (isFilteringRequired(path)) {
            byte[] bytes = Files.readAllBytes(path);
            final String value = filterSensitiveInfo(new String(bytes), variables, isAdmin());
            Files.write(path, value.getBytes(), StandardOpenOption.TRUNCATE_EXISTING);
        }
        return path;
    }

    private boolean isFilteringRequired(Path path) {
        return checkExtension(FilenameUtils.getExtension(path.getFileName().toString()));
    }

    /**
     * @return true when the extension is configured as sensitive; empty/missing
     * extensions match only the "-" marker, and "*" matches everything.
     */
    private boolean checkExtension(String extension) {
        if (sensitiveExtensions.contains(EXTENSION_ALL)) {
            return true;
        }
        if (extension == null || extension.isEmpty()) {
            return sensitiveExtensions.contains(EXTENSION_EMPTY);
        } else {
            return sensitiveExtensions.contains(extension);
        }
    }

    /**
     * Unpacks the archive (including its volumes), filters each extracted entry,
     * then re-compresses everything into a single (non-split) archive at {@code zipPath}.
     */
    private void processArchive(Path zipPath, Map<String, Object> variables) {
        File temporaryDir = createTempDir();
        try {
            CommonFileUtils.unzipFiles(zipPath.toFile(), temporaryDir);
            AnalysisZipUtils.deleteZipWithVolumes(zipPath);
            Files.list(temporaryDir.toPath()).forEach(path -> {
                try {
                    process(path, variables);
                } catch (IOException e) {
                    LOGGER.error("File processing error: {}", path.getFileName().toString(), e);
                }
            });
            // null split size => single archive, no volumes
            CommonFileUtils.compressAndSplit(temporaryDir, zipPath.toFile(), null);
        } catch (ZipException e) {
            LOGGER.error("Error unzipping file", e);
        } catch (IOException e) {
            LOGGER.error("File writing error", e);
        } finally {
            FileUtils.deleteQuietly(temporaryDir);
        }
    }

    /** Recursively processes a path: descends into directories, filters regular files. */
    private void process(Path path, Map<String, Object> variables) throws IOException {
        if (path.toFile().isDirectory()) {
            Files.list(path).forEach(child -> {
                try {
                    process(child, variables);
                } catch (IOException e) {
                    LOGGER.error("File processing error: {}", child.getFileName().toString(), e);
                }
            });
        } else {
            processFile(path, variables);
        }
    }

    /**
     * Filters one file: archives are unpacked and handled recursively; archive volumes
     * are skipped here (merged when the main archive is processed); everything else is
     * filtered directly.
     */
    private void processFile(Path path, Map<String, Object> variables) {
        try {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("File for process: {}", path.toString());
            }
            if (AnalysisZipUtils.isArchive(path.getFileName().toString())) {
                // If file is archive - decompress it first
                processArchive(path, variables);
            } else if (!AnalysisZipUtils.isArchiveVolume(path)) {
                doFilterSensitiveInfo(path, variables);
            }
        } catch (IOException e) {
            LOGGER.error("File filtering error: '{}'", path.getFileName().toString(), e);
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/ExecutionEngineAnalysisStatusInvalidationScheduler.java | src/main/java/org/ohdsi/webapi/executionengine/service/ExecutionEngineAnalysisStatusInvalidationScheduler.java | package org.ohdsi.webapi.executionengine.service;
import java.util.Date;
import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
//Cannot use ConditionalOnProperty annotation here, because it checks only existence of the property, but not null/empty values
@ConditionalOnExpression(
        "!T(org.springframework.util.StringUtils).isEmpty('${execution.invalidation.maxage.hours:}') && " +
        "!T(org.springframework.util.StringUtils).isEmpty('${execution.invalidation.period:}')")
@Service
public class ExecutionEngineAnalysisStatusInvalidationScheduler {

    // Executions older than this many hours that are still non-terminal get invalidated.
    @Value("${execution.invalidation.maxage.hours}")
    private int invalidateHours;

    // final: constructor-injected collaborator never reassigned.
    private final ScriptExecutionService scriptExecutionService;

    @Autowired
    public ExecutionEngineAnalysisStatusInvalidationScheduler(ScriptExecutionService scriptExecutionService) {
        this.scriptExecutionService = scriptExecutionService;
    }

    /**
     * Periodically marks as failed every execution that has been running longer than
     * the configured maximum age.
     */
    @Scheduled(fixedDelayString = "${execution.invalidation.period}")
    public void invalidateExecutions() {
        Date invalidate = DateUtils.addHours(new Date(), -invalidateHours);
        scriptExecutionService.invalidateExecutions(invalidate);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/AnalysisZipUtils.java | src/main/java/org/ohdsi/webapi/executionengine/service/AnalysisZipUtils.java | package org.ohdsi.webapi.executionengine.service;
import static com.google.common.io.Files.createTempDir;
import com.odysseusinc.arachne.execution_engine_common.util.CommonFileUtils;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.ZipParameters;
import net.lingala.zip4j.model.enums.CompressionLevel;
import net.lingala.zip4j.model.enums.CompressionMethod;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.webapi.exception.AtlasException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Static helpers for recognizing and manipulating (possibly multivolume) zip archives
 * produced by analysis executions.
 */
public class AnalysisZipUtils {

    protected static final Logger LOGGER = LoggerFactory.getLogger(AnalysisZipUtils.class);

    private static final String EXTENSION_ZIP = "zip";
    // Split-zip volume extensions look like .z01, .z02, ...
    private static final String ZIP_VOLUME_EXT_PATTERN = "z[0-9]+$";
    // Compiled once and shared: Pattern is immutable and thread-safe; avoids
    // re-compiling the regex on every isArchiveVolume() call.
    private static final Pattern ZIP_VOLUME_PATTERN = Pattern.compile(ZIP_VOLUME_EXT_PATTERN);

    /** Static-only utility class; not instantiable. */
    private AnalysisZipUtils() {
    }

    /** @return true if the path's file name looks like a split-zip volume (.zNN). */
    public static boolean isArchiveVolume(Path path) {
        String filename = path.getFileName().toString();
        return isArchiveVolume(filename);
    }

    /** @return true if the file name's extension matches the split-zip volume pattern (.zNN). */
    public static boolean isArchiveVolume(String filename) {
        String extension = FilenameUtils.getExtension(filename);
        Matcher matcher = ZIP_VOLUME_PATTERN.matcher(extension);
        return matcher.find();
    }

    /** @return true if the file name has a .zip extension (case-insensitive). */
    public static boolean isArchive(String filename) {
        String extension = FilenameUtils.getExtension(filename);
        return EXTENSION_ZIP.equalsIgnoreCase(extension);
    }

    /** @return true for .zip files whose name contains "result" (case-insensitive). */
    public static boolean isResultArchive(String filename) {
        return isArchive(filename) && StringUtils.containsIgnoreCase(filename, "result");
    }

    /** @return true for split-zip volumes whose name contains "result" (case-insensitive). */
    public static boolean isResultArchiveVolume(String filename) {
        return isArchiveVolume(filename) && StringUtils.containsIgnoreCase(filename, "result");
    }

    /**
     * Writes {@code contents} to a new file named {@code fileName} inside {@code tempDir}.
     *
     * @throws AtlasException wrapping any write failure (fails if the file already exists)
     */
    public static Path createFileInTempDir(File tempDir, String fileName, byte[] contents) {
        try {
            Path path = new File(tempDir, fileName).toPath();
            Files.write(path, contents, StandardOpenOption.CREATE_NEW);
            return path;
        } catch (Exception e) {
            LOGGER.error("File writing error for file: {}", fileName, e);
            throw new AtlasException(e);
        }
    }

    /**
     * Re-packs the archive at {@code zipPath} into a multivolume (split) zip with
     * volumes of at most {@code zipVolumeSizeMb} megabytes. Errors are logged, not thrown.
     */
    public static void repackZipWithMultivalue(Path zipPath, int zipVolumeSizeMb) {
        File temporaryDir = createTempDir();
        try {
            CommonFileUtils.unzipFiles(zipPath.toFile(), temporaryDir);
            AnalysisZipUtils.deleteZipWithVolumes(zipPath);
            // 1024L forces long arithmetic: int multiplication overflows for sizes >= 2048 MB.
            long zipChunkSizeInBytes = zipVolumeSizeMb * 1024L * 1024;
            CommonFileUtils.compressAndSplit(
                    temporaryDir,
                    zipPath.toFile(),
                    zipChunkSizeInBytes
            );
        } catch (ZipException e) {
            LOGGER.error("Error unzipping file", e);
        } catch (IOException e) {
            LOGGER.error("File writing error", e);
        } finally {
            FileUtils.deleteQuietly(temporaryDir);
        }
    }

    /**
     * Builds zip entry parameters (max DEFLATE compression) for adding a stream
     * under the given entry name.
     */
    public static ZipParameters getHeadersForFilesThatWillBeAddedToZip(String fileName) {
        ZipParameters parameters = new ZipParameters();
        parameters.setCompressionMethod(CompressionMethod.DEFLATE);
        parameters.setCompressionLevel(CompressionLevel.MAXIMUM);
        parameters.setIncludeRootFolder(false);
        parameters.setReadHiddenFiles(false);
        parameters.setFileNameInZip(fileName); // this would be the name of the file for this entry in the zip file
        return parameters;
    }

    /** Deletes the archive at {@code zipPath} together with all of its split volumes. */
    public static void deleteZipWithVolumes(Path zipPath) throws ZipException {
        ZipFile zipFile = new ZipFile(zipPath.toFile());
        List<File> files = zipFile.getSplitZipFiles();
        files.forEach(File::delete);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/ExecutionEngineStatusServiceImpl.java | src/main/java/org/ohdsi/webapi/executionengine/service/ExecutionEngineStatusServiceImpl.java | package org.ohdsi.webapi.executionengine.service;
import static org.ohdsi.webapi.executionengine.service.ExecutionEngineStatus.OFFLINE;
import static org.ohdsi.webapi.executionengine.service.ExecutionEngineStatus.ONLINE;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.ohdsi.webapi.service.HttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
@Service
public class ExecutionEngineStatusServiceImpl implements ExecutionEngineStatusService{
private volatile ExecutionEngineStatus executionEngineStatus = OFFLINE;
private final Logger logger = LoggerFactory.getLogger(ExecutionEngineStatusServiceImpl.class);
private final HttpClient client;
private final String executionEngineUrl;
private final String executionEngineToken;
@Autowired
public ExecutionEngineStatusServiceImpl(
final HttpClient client,
@Value("${executionengine.url}")
final String executionEngineURL,
@Value("${executionengine.token}")
final String executionEngineToken) {
this.client = client;
this.executionEngineUrl = executionEngineURL;
this.executionEngineToken = executionEngineToken;
}
@Scheduled(fixedDelayString = "${execution.status.period}")
public void checkExecutionEngineStatus() {
WebTarget webTarget = client.target(executionEngineUrl + "/metrics");
try {
webTarget
.request(MediaType.TEXT_PLAIN)
.accept(MediaType.TEXT_PLAIN)
.header("Authorization", executionEngineToken)
.get();
if (!isExecutionEngineOnline()) {
logger.info("Execution engine is online");
}
this.executionEngineStatus = ONLINE;
} catch (Exception e) {
if (isExecutionEngineOnline()) {
logger.error("Execution is unavailable, due to {}", e.getMessage());
}
this.executionEngineStatus = OFFLINE;
}
}
@Override
public ExecutionEngineStatus getExecutionEngineStatus() {
return this.executionEngineStatus;
}
private boolean isExecutionEngineOnline() {
return ONLINE.equals(this.executionEngineStatus);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/AnalysisZipRepackService.java | src/main/java/org/ohdsi/webapi/executionengine/service/AnalysisZipRepackService.java | package org.ohdsi.webapi.executionengine.service;
import static com.google.common.io.Files.createTempDir;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.io.FileUtils;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFile;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFileContent;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
/**
 * Responsible for splitting an oversized analysis-results archive into zip volumes,
 * which allows storing huge results in the database without oversized single rows.
 */
@Service
public class AnalysisZipRepackService {

    protected static final Logger LOGGER = LoggerFactory.getLogger(AnalysisZipRepackService.class);

    // Media type recorded for the generated volume files.
    public static final String MEDIA_TYPE = "application";

    /**
     * If the result list contains a "result" zip larger than {@code zipVolumeSizeMb},
     * replaces it with the set of split-zip volumes; otherwise returns the list unchanged.
     * Errors during repacking are logged and the original list is returned.
     *
     * @param originalFileContent result files of one execution (may be null/empty)
     * @param zipVolumeSizeMb     maximum volume size in megabytes
     * @return the (possibly repacked) list of result file contents
     */
    public List<AnalysisResultFileContent> process(List<AnalysisResultFileContent> originalFileContent, int zipVolumeSizeMb) {
        if (CollectionUtils.isEmpty(originalFileContent)) {
            return originalFileContent;
        }
        // 1024L forces long arithmetic: the int product overflows for sizes >= 2048 MB.
        long volumeSizeBytes = zipVolumeSizeMb * 1024L * 1024;
        List<AnalysisResultFileContent> analysisRepackResult = originalFileContent;
        AnalysisResultFileContent analysisResultZip = originalFileContent.stream()
                .filter(content -> AnalysisZipUtils.isResultArchive(content.getAnalysisResultFile().getFileName()))
                .filter(content -> content.getContents().length > volumeSizeBytes)
                .findFirst().orElse(null);
        if (analysisResultZip != null) {
            // Create the temp dir only when repacking actually happens, so it is
            // always cleaned up by the finally below (previously it leaked when
            // no oversized archive was found).
            File temporaryDir = createTempDir();
            try {
                List<AnalysisResultFileContent> contentsWithoutAnalysisResultZip = originalFileContent.stream()
                        .filter(content -> !content.equals(analysisResultZip))
                        .collect(Collectors.toList());
                Path analysisResultZipPath = AnalysisZipUtils.createFileInTempDir(temporaryDir, analysisResultZip.getAnalysisResultFile().getFileName(), analysisResultZip.getContents());
                AnalysisZipUtils.repackZipWithMultivalue(analysisResultZipPath, zipVolumeSizeMb);
                List<AnalysisResultFileContent> contentsForRepackedAnalysisResultZip = getContentsForMultivalueZip(temporaryDir, analysisResultZip);
                analysisRepackResult = ListUtils.union(contentsWithoutAnalysisResultZip, contentsForRepackedAnalysisResultZip);
            } catch (Exception e) {
                LOGGER.error("Cannot split archives", e);
            } finally {
                FileUtils.deleteQuietly(temporaryDir);
            }
        }
        return analysisRepackResult;
    }

    /**
     * Reads every file left in {@code temporaryDir} (the split-zip volumes) back into
     * {@link AnalysisResultFileContent} entities attached to the original execution.
     */
    private List<AnalysisResultFileContent> getContentsForMultivalueZip(File temporaryDir, AnalysisResultFileContent analysisResultContent) throws IOException {
        ExecutionEngineAnalysisStatus execution = analysisResultContent.getAnalysisResultFile().getExecution();
        List<AnalysisResultFileContent> resultFileContents = new ArrayList<>();
        File[] volumes = temporaryDir.listFiles();
        if (volumes == null) {
            // listFiles() returns null on I/O error or if the dir vanished.
            return resultFileContents;
        }
        for (File file : volumes) {
            Path path = file.toPath();
            AnalysisResultFileContent analysisResultFileContent = new AnalysisResultFileContent();
            analysisResultFileContent.setAnalysisResultFile(getFileMetadata(path, execution));
            analysisResultFileContent.setContents(Files.readAllBytes(path));
            resultFileContents.add(analysisResultFileContent);
        }
        return resultFileContents;
    }

    /** Builds the metadata entity for one generated volume file. */
    private AnalysisResultFile getFileMetadata(Path path, ExecutionEngineAnalysisStatus execution) {
        AnalysisResultFile analysisResultFile = new AnalysisResultFile();
        analysisResultFile.setFileName(path.getFileName().toString());
        analysisResultFile.setMediaType(MEDIA_TYPE);
        analysisResultFile.setExecution(execution);
        return analysisResultFile;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/ScriptExecutionServiceImpl.java | src/main/java/org/ohdsi/webapi/executionengine/service/ScriptExecutionServiceImpl.java | package org.ohdsi.webapi.executionengine.service;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.AnalysisRequestDTO;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.AnalysisRequestStatusDTO;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.DataSourceUnsecuredDTO;
import com.odysseusinc.arachne.execution_engine_common.util.CommonFileUtils;
import java.nio.file.Path;
import java.util.Optional;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.FileHeader;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.text.StrSubstitutor;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.MultiPart;
import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart;
import org.ohdsi.webapi.JobInvalidator;
import org.ohdsi.webapi.exception.AtlasException;
import org.ohdsi.webapi.executionengine.entity.AnalysisFile;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFile;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineGenerationEntity;
import org.ohdsi.webapi.executionengine.repository.AnalysisExecutionRepository;
import org.ohdsi.webapi.executionengine.repository.ExecutionEngineGenerationRepository;
import org.ohdsi.webapi.executionengine.repository.InputFileRepository;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.ohdsi.webapi.service.HttpClient;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.shiro.management.datasource.SourceAccessor;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.util.DataSourceDTOParser;
import org.ohdsi.webapi.util.SourceUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import javax.net.ssl.HttpsURLConnection;
import javax.transaction.Transactional;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import java.io.*;
import java.nio.file.Files;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import static org.ohdsi.webapi.executionengine.service.AnalysisZipUtils.getHeadersForFilesThatWillBeAddedToZip;
@Service
@Transactional
class ScriptExecutionServiceImpl extends AbstractDaoService implements ScriptExecutionService {
private static final Logger logger = LoggerFactory.getLogger(ScriptExecutionServiceImpl.class);
private static final String REQUEST_FILENAME = "request.zip";
private static final String AUTHORIZATION_HEADER = "Authorization";
private static final String ARACHNE_COMPRESSED_HEADER = "arachne-compressed";
private static final String ARACHNE_WAITING_COMPRESSED_RESULT_HEADER = "arachne-waiting-compressed-result";
private static final String TEMPDIR_PREFIX = "webapi-exec";
@Autowired
private HttpClient client;
@Value("${executionengine.url}")
private String executionEngineURL;
@Value("${executionengine.token}")
private String executionEngineToken;
@Value("${executionengine.resultCallback}")
private String resultCallback;
@Value("${executionengine.updateStatusCallback}")
private String updateStatusCallback;
@Value("${executionengine.resultExclusions}")
private String resultExclusions;
private static List<ExecutionEngineAnalysisStatus.Status> INVALIDATE_STATUSES = ImmutableList.of(
ExecutionEngineAnalysisStatus.Status.RUNNING,
ExecutionEngineAnalysisStatus.Status.STARTED,
ExecutionEngineAnalysisStatus.Status.PENDING
);
@Autowired
private SourceService sourceService;
@Autowired
private InputFileRepository inputFileRepository;
@Autowired
private JobExplorer jobExplorer;
@Autowired
private JobInvalidator jobInvalidator;
@Autowired
private AnalysisExecutionRepository analysisExecutionRepository;
@Autowired
private ExecutionEngineGenerationRepository executionEngineGenerationRepository;
@Autowired
private SourceAccessor sourceAccessor;
ScriptExecutionServiceImpl() throws KeyManagementException, NoSuchAlgorithmException {
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true);
}
@Override
public void runScript(Long executionId, Source source, List<AnalysisFile> files, String updatePassword,
String executableFilename, String targetTable) {
DataSourceUnsecuredDTO dataSourceData = DataSourceDTOParser.parseDTO(source);
dataSourceData.setCohortTargetTable(targetTable);
dataSourceData.setTargetSchema(SourceUtils.getTempQualifier(source));
final String analysisExecutionUrl = "/analyze";
WebTarget webTarget = client.target(executionEngineURL + analysisExecutionUrl);
try{
File tempDir = Files.createTempDirectory(TEMPDIR_PREFIX).toFile();
try{
saveFilesToTempDir(tempDir, files);
try(MultiPart multiPart = buildRequest(buildAnalysisRequest(executionId, dataSourceData, updatePassword, executableFilename), tempDir)) {
File zipFile = new File(tempDir, REQUEST_FILENAME);
CommonFileUtils.compressAndSplit(tempDir, zipFile, null);
try(InputStream in = new FileInputStream(zipFile)) {
StreamDataBodyPart filePart = new StreamDataBodyPart("file", in, zipFile.getName());
multiPart.bodyPart(filePart);
webTarget
.request(MediaType.MULTIPART_FORM_DATA_TYPE)
.accept(MediaType.APPLICATION_JSON)
.header(AUTHORIZATION_HEADER, executionEngineToken)
.header(ARACHNE_COMPRESSED_HEADER, "true")
.header(ARACHNE_WAITING_COMPRESSED_RESULT_HEADER, "true")
.post(Entity.entity(multiPart, multiPart.getMediaType()),
AnalysisRequestStatusDTO.class);
}
}
}finally {
FileUtils.deleteQuietly(tempDir);
}
}catch (IOException e) {
log.error("Failed to compress request files", e);
throw new InternalServerErrorException(e);
}
}
@Override
public Source findSourceByKey(final String key) {
return sourceService.findBySourceKey(key);
}
private void saveFilesToTempDir(File tempDir, List<AnalysisFile> files) {
files.forEach(file -> {
try(OutputStream out = new FileOutputStream(new File(tempDir, file.getFileName()))) {
IOUtils.write(file.getContents(), out);
}catch (IOException e) {
log.error("Cannot build request to ExecutionEngine", e);
throw new InternalServerErrorException();
}
});
}
private MultiPart buildRequest(AnalysisRequestDTO analysisRequestDTO, File tempDir) throws ZipException, IOException {
MultiPart multiPart = new MultiPart();
multiPart.setMediaType(MediaType.MULTIPART_FORM_DATA_TYPE);
multiPart.bodyPart(
new FormDataBodyPart("analysisRequest", analysisRequestDTO,
MediaType.APPLICATION_JSON_TYPE));
return multiPart;
}
private AnalysisRequestDTO buildAnalysisRequest(Long executionId, DataSourceUnsecuredDTO dataSourceData, String password,
String executableFileName) {
AnalysisRequestDTO analysisRequestDTO = new AnalysisRequestDTO();
analysisRequestDTO.setId(executionId);
analysisRequestDTO.setDataSource(dataSourceData);
analysisRequestDTO.setCallbackPassword(password);
analysisRequestDTO.setRequested(new Date());
analysisRequestDTO.setExecutableFileName(executableFileName);
analysisRequestDTO.setResultExclusions(resultExclusions);
analysisRequestDTO.setResultCallback(
StrSubstitutor.replace(resultCallback,
ImmutableMap.of("id", executionId,
"password", password
),
"{", "}"));
analysisRequestDTO.setUpdateStatusCallback(
StrSubstitutor.replace(updateStatusCallback,
ImmutableMap.of("id", executionId,
"password", password
),
"{", "}"));
return analysisRequestDTO;
}
@Override
public ExecutionEngineAnalysisStatus createAnalysisExecution(Long jobId, Source source, String password, List<AnalysisFile> analysisFiles) {
ExecutionEngineGenerationEntity executionEngineGenerationEntity = executionEngineGenerationRepository.findOne(jobId);
ExecutionEngineAnalysisStatus execution = new ExecutionEngineAnalysisStatus();
execution.setExecutionStatus(ExecutionEngineAnalysisStatus.Status.STARTED);
execution.setExecutionEngineGeneration(executionEngineGenerationEntity);
ExecutionEngineAnalysisStatus saved = analysisExecutionRepository.saveAndFlush(execution);
if (Objects.nonNull(analysisFiles)) {
analysisFiles.forEach(file -> file.setAnalysisExecution(saved));
inputFileRepository.save(analysisFiles);
}
return saved;
}
@Override
public String getExecutionStatus(Long executionId) {
String status;
JobExecution execution = jobExplorer.getJobExecution(executionId);
if (execution.getExecutionContext().containsKey("engineExecutionId")) {
Long execId = execution.getExecutionContext().getLong("engineExecutionId");
ExecutionEngineAnalysisStatus analysisExecution = analysisExecutionRepository.findOne(execId.intValue());
if (analysisExecution == null) {
throw new NotFoundException(String.format("Execution with id=%d was not found", executionId));
}
status = analysisExecution.getExecutionStatus().name();
} else {
status = ExecutionEngineAnalysisStatus.Status.PENDING.name();
}
return status;
}
@Override
public void invalidateExecutions(Date invalidateDate) {
getTransactionTemplateRequiresNew().execute(status -> {
logger.info("Invalidating execution engine based analyses");
List<ExecutionEngineAnalysisStatus> executions = analysisExecutionRepository.findAllInvalidAnalysis(invalidateDate, ScriptExecutionServiceImpl.INVALIDATE_STATUSES);
executions.forEach(exec -> {
exec.setExecutionStatus(ExecutionEngineAnalysisStatus.Status.FAILED);
jobInvalidator.invalidateJobExecutionById(exec);
});
analysisExecutionRepository.save(executions);
return null;
});
}
@PostConstruct
public void invalidateOutdatedAnalyses() {
invalidateExecutions(new Date());
}
@Override
public File getExecutionResult(Long executionId) throws IOException {
ExecutionEngineGenerationEntity executionEngineGeneration = executionEngineGenerationRepository.findById(executionId)
.orElseThrow(NotFoundException::new);
sourceAccessor.checkAccess(executionEngineGeneration.getSource());
ExecutionEngineAnalysisStatus analysisExecution = executionEngineGeneration.getAnalysisExecution();
java.nio.file.Path tempDirectory = Files.createTempDirectory("atlas_ee_arch");
String fileName = "execution_" + executionId + "_result.zip";
File archive = tempDirectory.resolve(fileName).toFile();
archive.deleteOnExit();
try {
ZipFile resultZip = new ZipFile(archive);
List<AnalysisResultFile> zipFiles = analysisExecution.getResultFiles().stream()
.filter(resultFile ->
AnalysisZipUtils.isResultArchive(resultFile.getFileName()) ||
AnalysisZipUtils.isResultArchiveVolume(resultFile.getFileName()))
.collect(Collectors.toList());
List<AnalysisResultFile> otherFiles = analysisExecution.getResultFiles().stream()
.filter(resultFile -> !zipFiles.contains(resultFile))
.collect(Collectors.toList());
for (AnalysisResultFile resultFile : otherFiles) {
addFileToZip(resultZip, resultFile);
}
copyContentOfOneZipToAnotherZip(zipFiles, resultZip,tempDirectory);
} catch (ZipException e) {
throw new AtlasException("Cannot process zip archive result", e);
}
return archive;
}
private void addFileToZip(ZipFile resultZip, AnalysisResultFile resultFile) throws ZipException {
resultZip.addStream(
new ByteArrayInputStream(resultFile.getContents()),
getHeadersForFilesThatWillBeAddedToZip(resultFile.getFileName())
);
}
private void copyContentOfOneZipToAnotherZip(List<AnalysisResultFile> zipWithMultivolume, ZipFile resultZip, Path tempDirectory) throws IOException, ZipException {
if (CollectionUtils.isEmpty(zipWithMultivolume)) {
return;
}
Optional<AnalysisResultFile> zipAnalysisFileOpt = zipWithMultivolume.stream()
.filter(file -> AnalysisZipUtils.isArchive(file.getFileName()))
.findFirst();
if (zipAnalysisFileOpt.isPresent()) {
AnalysisResultFile zipAnalysisFile = zipAnalysisFileOpt.orElse(null);
File zipFile = saveZipFileToTempDirectory(zipAnalysisFile, tempDirectory);
saveZipVolumeFilesToTempDirectory(zipWithMultivolume, tempDirectory);
ZipFile outZipFile = new ZipFile(zipFile);
//getFileHeaders return not generic List, that is already fixed in the last version of library
for (FileHeader header : (List< FileHeader>) outZipFile.getFileHeaders()) {
resultZip.addStream(
outZipFile.getInputStream(header),
getHeadersForFilesThatWillBeAddedToZip(header.getFileName())
);
}
}
}
private void saveZipVolumeFilesToTempDirectory(List<AnalysisResultFile> resultFiles, Path tempDirectory) throws IOException {
for (AnalysisResultFile resultFile : resultFiles) {
if (AnalysisZipUtils.isResultArchiveVolume(resultFile.getFileName())) {
saveZipFileToTempDirectory(resultFile, tempDirectory);
}
}
}
private File saveZipFileToTempDirectory(AnalysisResultFile resultFile, Path tempDirectory) throws IOException {
    // Materialize the in-memory result file under its own name inside tempDirectory.
    Path target = tempDirectory.resolve(resultFile.getFileName());
    File asFile = target.toFile();
    FileUtils.writeByteArrayToFile(asFile, resultFile.getContents());
    return asFile;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/ExecutionEngineStatus.java | src/main/java/org/ohdsi/webapi/executionengine/service/ExecutionEngineStatus.java | package org.ohdsi.webapi.executionengine.service;
/**
 * Reachability state of the remote Arachne Execution Engine, as exposed by
 * {@code ExecutionEngineStatusService}. NOTE(review): presumably derived from
 * a health check — confirm against the service implementation.
 */
public enum ExecutionEngineStatus {
    ONLINE,
    OFFLINE
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/AnalysisResultFileContentSensitiveInfoService.java | src/main/java/org/ohdsi/webapi/executionengine/service/AnalysisResultFileContentSensitiveInfoService.java | package org.ohdsi.webapi.executionengine.service;
import org.ohdsi.webapi.common.sensitiveinfo.SensitiveInfoService;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFileContentList;
/**
 * Specialization of {@link SensitiveInfoService} that filters sensitive
 * information out of execution-engine result file contents.
 */
public interface AnalysisResultFileContentSensitiveInfoService extends SensitiveInfoService<AnalysisResultFileContentList> {
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/service/ExecutionEngineStatusService.java | src/main/java/org/ohdsi/webapi/executionengine/service/ExecutionEngineStatusService.java | package org.ohdsi.webapi.executionengine.service;
/**
 * Reports whether the remote Arachne Execution Engine is reachable.
 */
public interface ExecutionEngineStatusService {

    /** @return the current engine status ({@code ONLINE} or {@code OFFLINE}). */
    ExecutionEngineStatus getExecutionEngineStatus();
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/exception/ScriptCallbackException.java | src/main/java/org/ohdsi/webapi/executionengine/exception/ScriptCallbackException.java | package org.ohdsi.webapi.executionengine.exception;
/**
 * Thrown when a callback from the remote Execution Engine cannot be processed.
 */
public class ScriptCallbackException extends RuntimeException {

    /**
     * @param message human-readable description of the callback failure
     */
    public ScriptCallbackException(String message) {
        super(message);
    }

    /**
     * Preserves the underlying cause so the original stack trace is not lost,
     * consistent with the other WebAPI exception types (e.g. AtlasException).
     *
     * @param message human-readable description of the callback failure
     * @param cause   the exception that triggered this failure
     */
    public ScriptCallbackException(String message, Throwable cause) {
        super(message, cause);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/repository/ExecutionEngineGenerationRepository.java | src/main/java/org/ohdsi/webapi/executionengine/repository/ExecutionEngineGenerationRepository.java | package org.ohdsi.webapi.executionengine.repository;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineGenerationEntity;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
import java.util.Optional;
/**
 * Spring Data repository for {@link ExecutionEngineGenerationEntity}
 * (generations launched on the Execution Engine, keyed by job execution id).
 */
@Repository
public interface ExecutionEngineGenerationRepository
        extends CrudRepository<ExecutionEngineGenerationEntity, Long> {

    /** Finds a generation by its job execution id, empty if none exists. */
    Optional<ExecutionEngineGenerationEntity> findById(Long id);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/repository/AnalysisResultFileContentRepository.java | src/main/java/org/ohdsi/webapi/executionengine/repository/AnalysisResultFileContentRepository.java | package org.ohdsi.webapi.executionengine.repository;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFileContent;
import org.springframework.data.jpa.repository.JpaRepository;
/** Spring Data repository for {@link AnalysisResultFileContent} rows (raw output-file bytes). */
public interface AnalysisResultFileContentRepository extends JpaRepository<AnalysisResultFileContent, Integer> {
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/repository/AnalysisExecutionRepository.java | src/main/java/org/ohdsi/webapi/executionengine/repository/AnalysisExecutionRepository.java | package org.ohdsi.webapi.executionengine.repository;
import java.util.Date;
import java.util.List;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
/** Spring Data repository for {@link ExecutionEngineAnalysisStatus} entities. */
public interface AnalysisExecutionRepository extends JpaRepository<ExecutionEngineAnalysisStatus, Integer> {

    /**
     * Finds analyses whose status is one of {@code statuses} and whose owning
     * generation started before {@code invalidate} — i.e. runs stale enough to
     * be invalidated.
     *
     * @param invalidate cut-off start time; generations started earlier match
     * @param statuses   statuses to match (presumably the non-terminal ones — confirm at call site)
     */
    @Query(" SELECT st FROM ExecutionEngineAnalysisStatus st JOIN st.executionEngineGeneration ge " +
            " WHERE st.executionStatus in(:statuses) " +
            " AND ge.startTime < :invalidate ")
    List<ExecutionEngineAnalysisStatus> findAllInvalidAnalysis(
            @Param("invalidate") Date invalidate,
            @Param("statuses") List<ExecutionEngineAnalysisStatus.Status> statuses
    );
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/repository/OutputFileRepository.java | src/main/java/org/ohdsi/webapi/executionengine/repository/OutputFileRepository.java | package org.ohdsi.webapi.executionengine.repository;
import java.util.List;
import org.ohdsi.webapi.executionengine.entity.AnalysisResultFile;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
/** Spring Data repository for {@link AnalysisResultFile} (execution output files). */
@Repository
public interface OutputFileRepository
        extends CrudRepository<AnalysisResultFile, Long> {

    /** Returns every result file produced by the given analysis execution. */
    List<AnalysisResultFile> findByExecutionId(Integer executionId);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/repository/InputFileRepository.java | src/main/java/org/ohdsi/webapi/executionengine/repository/InputFileRepository.java | package org.ohdsi.webapi.executionengine.repository;
import org.ohdsi.webapi.executionengine.entity.AnalysisFile;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
/** Spring Data repository for {@link AnalysisFile} (execution input files). */
@Repository
public interface InputFileRepository
        extends CrudRepository<AnalysisFile, Long> {
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/entity/AnalysisResultFile.java | src/main/java/org/ohdsi/webapi/executionengine/entity/AnalysisResultFile.java | package org.ohdsi.webapi.executionengine.entity;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import javax.persistence.*;
/**
 * JPA entity for the {@code output_files} table: one file produced by an
 * execution-engine analysis run. The raw bytes live in the separate
 * {@link AnalysisResultFileContent} entity so they can be loaded lazily.
 */
@Entity(name = "output_files")
public class AnalysisResultFile {

    @Id
    @GenericGenerator(
            name = "analysis_result_file_generator",
            strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
            parameters = {
                    @Parameter(name = "sequence_name", value = "output_file_seq"),
                    @Parameter(name = "increment_size", value = "1")
            }
    )
    @GeneratedValue(generator = "analysis_result_file_generator")
    @Column
    private Long id;

    // Owning execution; immutable after insert (updatable = false).
    @ManyToOne(optional = false, fetch = FetchType.LAZY)
    @JoinColumn(name = "execution_id", nullable = false, updatable = false)
    private ExecutionEngineAnalysisStatus execution;

    @Column(name = "file_name")
    private String fileName;

    @Column(name = "media_type")
    private String mediaType;

    // Lazily-loaded companion row holding the actual file bytes.
    @OneToOne(optional = false, mappedBy = "analysisResultFile", fetch = FetchType.LAZY)
    private AnalysisResultFileContent content;

    public AnalysisResultFile() {
    }

    public AnalysisResultFile(
            ExecutionEngineAnalysisStatus execution,
            String fileName,
            String mediaType) {
        this.execution = execution;
        this.fileName = fileName;
        this.mediaType = mediaType;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public ExecutionEngineAnalysisStatus getExecution() {
        return execution;
    }

    public void setExecution(ExecutionEngineAnalysisStatus execution) {
        this.execution = execution;
    }

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    // NOTE(review): delegates to the lazily-loaded content row; throws
    // NullPointerException if 'content' was never set/loaded (e.g. on a freshly
    // constructed instance) — confirm callers only use this on persisted entities.
    public byte[] getContents() {
        return content.getContents();
    }

    // NOTE(review): same null-content caveat as getContents().
    public void setContents(byte[] contents) {
        this.content.setContents(contents);
    }

    public String getMediaType() {
        return mediaType;
    }

    public void setMediaType(String mediaType) {
        this.mediaType = mediaType;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/entity/ExecutionEngineGenerationEntity.java | src/main/java/org/ohdsi/webapi/executionengine/entity/ExecutionEngineGenerationEntity.java | package org.ohdsi.webapi.executionengine.entity;
import org.ohdsi.webapi.common.generation.CommonGeneration;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
/**
 * Base JPA entity for generations executed on the Arachne Execution Engine.
 * Each concrete generation type gets its own table (TABLE_PER_CLASS) and links
 * back to the engine-side analysis status record.
 */
@Entity
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
public abstract class ExecutionEngineGenerationEntity extends CommonGeneration {

    // Credential the engine uses when calling back to update this generation.
    @Column(name = "update_password")
    private String updatePassword;

    // Engine-side execution status; loaded lazily, no setter (assigned by persistence).
    @OneToOne(targetEntity = ExecutionEngineAnalysisStatus.class, fetch = FetchType.LAZY)
    @JoinColumn(name = "analysis_execution_id")
    private ExecutionEngineAnalysisStatus analysisExecution;

    public String getUpdatePassword() {
        return updatePassword;
    }

    public void setUpdatePassword(String updatePassword) {
        this.updatePassword = updatePassword;
    }

    public ExecutionEngineAnalysisStatus getAnalysisExecution() {
        return analysisExecution;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/entity/ExecutionEngineAnalysisStatus.java | src/main/java/org/ohdsi/webapi/executionengine/entity/ExecutionEngineAnalysisStatus.java | package org.ohdsi.webapi.executionengine.entity;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import javax.persistence.*;
import java.util.List;
/**
 * JPA entity for the {@code ee_analysis_status} table: tracks the lifecycle
 * status of one analysis run on the Execution Engine, the batch generation
 * that launched it, and the files it produced.
 */
@Entity
@Table(name = "ee_analysis_status")
public class ExecutionEngineAnalysisStatus {

    /** Lifecycle states of an execution-engine analysis run. */
    public enum Status {
        PENDING, STARTED, RUNNING, COMPLETED, FAILED
    };

    @Id
    @GenericGenerator(
            name = "analysis_execution_generator",
            strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
            parameters = {
                    @Parameter(name = "sequence_name", value = "analysis_execution_sequence"),
                    @Parameter(name = "increment_size", value = "1")
            }
    )
    @GeneratedValue(generator = "analysis_execution_generator")
    @Column(name = "id")
    private Integer id;

    // Stored as the enum constant's name (STRING).
    // NOTE(review): column name is camelCase ("executionStatus"), unlike the
    // snake_case columns elsewhere — confirm it matches the actual DDL.
    @Column(name = "executionStatus")
    @Enumerated(EnumType.STRING)
    private Status executionStatus;

    // Batch generation (job execution) that launched this analysis.
    @ManyToOne(targetEntity = ExecutionEngineGenerationEntity.class, fetch = FetchType.LAZY)
    @JoinColumn(name = "job_execution_id")
    private ExecutionEngineGenerationEntity executionEngineGeneration;

    // Files produced by the run; loaded lazily.
    @OneToMany(mappedBy = "execution", targetEntity = AnalysisResultFile.class, fetch = FetchType.LAZY)
    private List<AnalysisResultFile> resultFiles;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Status getExecutionStatus() {
        return executionStatus;
    }

    public void setExecutionStatus(Status executionStatus) {
        this.executionStatus = executionStatus;
    }

    public ExecutionEngineGenerationEntity getExecutionEngineGeneration() {
        return executionEngineGeneration;
    }

    public void setExecutionEngineGeneration(ExecutionEngineGenerationEntity executionEngineGeneration) {
        this.executionEngineGeneration = executionEngineGeneration;
    }

    public List<AnalysisResultFile> getResultFiles() {
        return resultFiles;
    }

    public void setResultFiles(List<AnalysisResultFile> resultFiles) {
        this.resultFiles = resultFiles;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/entity/AnalysisResultFileContentList.java | src/main/java/org/ohdsi/webapi/executionengine/entity/AnalysisResultFileContentList.java | package org.ohdsi.webapi.executionengine.entity;
import java.util.ArrayList;
import java.util.List;
/**
 * Mutable holder for a batch of result-file contents passed through
 * sensitive-information filtering, plus a flag recording whether any file
 * failed to be filtered.
 */
public class AnalysisResultFileContentList {

    private List<AnalysisResultFileContent> files;
    private boolean hasErrors;

    public AnalysisResultFileContentList() {
        files = new ArrayList<>();
    }

    /** The files collected so far; callers mutate the returned list directly. */
    public List<AnalysisResultFileContent> getFiles() {
        return this.files;
    }

    /** Records whether filtering encountered at least one error. */
    public void setHasErrors(boolean hasErrors) {
        this.hasErrors = hasErrors;
    }

    /** True when no filtering errors were recorded. */
    public boolean isSuccessfullyFiltered() {
        return !this.hasErrors;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/entity/AnalysisFile.java | src/main/java/org/ohdsi/webapi/executionengine/entity/AnalysisFile.java | package org.ohdsi.webapi.executionengine.entity;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
/**
 * JPA entity for the {@code input_files} table: one file submitted to the
 * Execution Engine as part of an analysis run. Unlike output files, the bytes
 * are stored inline on this entity (lazily fetched).
 */
@Entity(name = "input_files")
public class AnalysisFile {

    @Id
    @GenericGenerator(
            name = "analysis_input_file_generator",
            strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
            parameters = {
                    @Parameter(name = "sequence_name", value = "input_file_seq"),
                    @Parameter(name = "increment_size", value = "1")
            }
    )
    @GeneratedValue(generator = "analysis_input_file_generator")
    @Column
    private Long id;

    // Owning execution; immutable after insert (updatable = false).
    @ManyToOne(optional = false)
    @JoinColumn(name = "execution_id", nullable = false, updatable = false)
    private ExecutionEngineAnalysisStatus analysisExecution;

    @Column(name = "file_name")
    private String fileName;

    // Raw file bytes; columnDefinition BYTEA targets PostgreSQL.
    @Column(name = "file_contents", columnDefinition = "BYTEA")
    @Basic(fetch = FetchType.LAZY)
    private byte[] contents;

    public AnalysisFile() {
    }

    public Long getId() {
        return id;
    }

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public byte[] getContents() {
        return contents;
    }

    public void setContents(byte[] contents) {
        this.contents = contents;
    }

    public ExecutionEngineAnalysisStatus getAnalysisExecution() {
        return analysisExecution;
    }

    public void setAnalysisExecution(ExecutionEngineAnalysisStatus analysisExecution) {
        this.analysisExecution = analysisExecution;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/entity/AnalysisResultFileContent.java | src/main/java/org/ohdsi/webapi/executionengine/entity/AnalysisResultFileContent.java | package org.ohdsi.webapi.executionengine.entity;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.MapsId;
import javax.persistence.OneToOne;
/**
 * JPA entity for the {@code output_file_contents} table: holds the raw bytes
 * of an {@link AnalysisResultFile}, sharing that row's primary key via
 * {@link MapsId} so the heavy blob can be fetched independently.
 */
@Entity(name = "output_file_contents")
public class AnalysisResultFileContent {

    // Derived from the owning AnalysisResultFile's id (@MapsId below).
    @Id
    @Column(name = "output_file_id")
    private Long id;

    @OneToOne
    @JoinColumn(name = "output_file_id")
    @MapsId
    private AnalysisResultFile analysisResultFile;

    // Raw file bytes; columnDefinition BYTEA targets PostgreSQL.
    @Column(name = "file_contents", columnDefinition = "BYTEA")
    @Basic(fetch = FetchType.LAZY)
    private byte[] contents;

    public AnalysisResultFileContent() {
    }

    // NOTE(review): creates a fresh AnalysisResultFile but does not set its
    // 'content' back-reference, so the in-memory link is one-way until
    // persisted/reloaded — confirm callers rely on persistence, not the object graph.
    public AnalysisResultFileContent(ExecutionEngineAnalysisStatus execution,
                                     String fileName,
                                     String mediaType, byte[] contents) {
        this.analysisResultFile = new AnalysisResultFile(execution, fileName, mediaType);
        this.contents = contents;
    }

    public AnalysisResultFile getAnalysisResultFile() {
        return analysisResultFile;
    }

    public void setAnalysisResultFile(AnalysisResultFile analysisResultFile) {
        this.analysisResultFile = analysisResultFile;
    }

    public byte[] getContents() {
        return contents;
    }

    public void setContents(byte[] contents) {
        this.contents = contents;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/job/RunExecutionEngineTasklet.java | src/main/java/org/ohdsi/webapi/executionengine/job/RunExecutionEngineTasklet.java | package org.ohdsi.webapi.executionengine.job;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.executionengine.entity.AnalysisFile;
import org.ohdsi.webapi.executionengine.service.ScriptExecutionService;
import org.ohdsi.webapi.source.Source;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.repeat.RepeatStatus;
import java.util.List;
import java.util.Map;
import static org.ohdsi.webapi.Constants.Params.EXECUTABLE_FILE_NAME;
import static org.ohdsi.webapi.Constants.Params.TARGET_TABLE;
import static org.ohdsi.webapi.Constants.Params.UPDATE_PASSWORD;
/**
 * Spring Batch tasklet that submits the prepared analysis files to the
 * Arachne Execution Engine for the configured source.
 */
public class RunExecutionEngineTasklet extends BaseExecutionTasklet {

    // NOTE(review): not referenced inside this class — presumably used by
    // callers as a shared job-parameter key; confirm before removing.
    public static final String SCRIPT_ID = "scriptId";

    private final ScriptExecutionService executionService;
    private final Source source;
    private final List<AnalysisFile> analysisFiles;

    public RunExecutionEngineTasklet(ScriptExecutionService executionService, Source source, List<AnalysisFile> analysisFiles) {
        this.executionService = executionService;
        this.source = source;
        this.analysisFiles = analysisFiles;
    }

    /**
     * Reads the update password, executable file name and target table from
     * the job parameters and triggers the script run.
     * NOTE(review): jobParams.get(...).toString() throws NullPointerException
     * if any of the three parameters is absent — confirm they are always set
     * by the job builder.
     */
    @Override
    public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception {
        final Long jobId = chunkContext.getStepContext().getStepExecution().getJobExecution().getId();
        Map<String, Object> jobParams = chunkContext.getStepContext().getJobParameters();
        final String updatePassword = jobParams.get(UPDATE_PASSWORD).toString();
        final String executableFilename = jobParams.get(EXECUTABLE_FILE_NAME).toString();
        final String targetTable = jobParams.get(TARGET_TABLE).toString();
        executionService.runScript(jobId, source, analysisFiles, updatePassword, executableFilename, targetTable);
        return RepeatStatus.FINISHED;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/job/BaseExecutionTasklet.java | src/main/java/org/ohdsi/webapi/executionengine/job/BaseExecutionTasklet.java | package org.ohdsi.webapi.executionengine.job;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.item.ExecutionContext;
/**
 * Base class for execution-engine tasklets: captures the job-level
 * {@link ExecutionContext} in {@link #beforeStep} and exposes simple
 * get/put helpers so subclasses can share state across steps of the same job.
 */
public abstract class BaseExecutionTasklet implements Tasklet, StepExecutionListener {

    // Job-scoped execution context, captured before the step runs.
    private ExecutionContext executionContext;

    @Override
    public void beforeStep(final StepExecution stepExecution) {
        this.executionContext = stepExecution
                .getJobExecution()
                .getExecutionContext();
    }

    /** Default behavior: report the step as completed; subclasses may override. */
    @Override
    public ExitStatus afterStep(final StepExecution stepExecution) {
        return ExitStatus.COMPLETED;
    }

    /** True if the job execution context holds a value for {@code key}. */
    public boolean contains(String key) {
        return executionContext.containsKey(key);
    }

    /** Stores {@code value} in the job execution context. */
    public void put(final String key, final Object value) {
        executionContext.put(key, value);
    }

    /** Reads a value from the job execution context (null if absent). */
    public Object get(final String key) {
        return executionContext.get(key);
    }

    /** Stores an int in the job execution context. */
    public void putInt(final String key, final int value) {
        executionContext.putInt(key, value);
    }

    /** Reads an int from the job execution context. */
    public int getInt(final String key) {
        return executionContext.getInt(key);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/job/CreateAnalysisTasklet.java | src/main/java/org/ohdsi/webapi/executionengine/job/CreateAnalysisTasklet.java | package org.ohdsi.webapi.executionengine.job;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus;
import org.ohdsi.webapi.executionengine.entity.AnalysisFile;
import org.ohdsi.webapi.executionengine.service.ScriptExecutionService;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.repeat.RepeatStatus;
import java.util.List;
import java.util.Map;
/**
 * First step of an execution-engine job: registers the analysis execution
 * (with its input files) and publishes the new execution id to the job
 * execution context under {@link #ANALYSIS_EXECUTION_ID}.
 */
public class CreateAnalysisTasklet extends BaseExecutionTasklet {

    // Context key under which the created analysis execution id is stored.
    static final String ANALYSIS_EXECUTION_ID = "engineAnalysisExecutionId";

    private final ScriptExecutionService service;
    private final String sourceKey;
    private List<AnalysisFile> analysisFiles;
    // Set by execute(); read back in afterStep().
    private Integer analysisId;

    public CreateAnalysisTasklet(ScriptExecutionService executionService, String sourceKey, List<AnalysisFile> analysisFiles) {
        this.service = executionService;
        this.sourceKey = sourceKey;
        this.analysisFiles = analysisFiles;
    }

    @Override
    public RepeatStatus execute(StepContribution stepContribution, ChunkContext context) throws Exception {
        Long jobId = context.getStepContext().getStepExecution().getJobExecution().getId();
        Map<String, Object> jobParams = context.getStepContext().getJobParameters();
        // NOTE(review): throws NullPointerException if UPDATE_PASSWORD is
        // missing from the job parameters — confirm the job builder always sets it.
        final String updatePassword = jobParams.get(Constants.Params.UPDATE_PASSWORD).toString();
        final ExecutionEngineAnalysisStatus createAnalysis = service.createAnalysisExecution(
                jobId,
                service.findSourceByKey(sourceKey),
                updatePassword,
                analysisFiles);
        this.analysisId = createAnalysis.getId();
        return RepeatStatus.FINISHED;
    }

    // NOTE(review): unboxes this.analysisId — NullPointerException if execute()
    // never completed; confirm afterStep is only reached after a successful run.
    @Override
    public ExitStatus afterStep(final StepExecution stepExecution) {
        putInt(ANALYSIS_EXECUTION_ID, this.analysisId);
        return ExitStatus.COMPLETED;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/executionengine/job/ExecutionEngineCallbackTasklet.java | src/main/java/org/ohdsi/webapi/executionengine/job/ExecutionEngineCallbackTasklet.java | package org.ohdsi.webapi.executionengine.job;
import static org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus.Status.COMPLETED;
import static org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus.Status.FAILED;
import java.util.Optional;
import javax.persistence.EntityManager;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineAnalysisStatus;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineGenerationEntity;
import org.ohdsi.webapi.executionengine.repository.ExecutionEngineGenerationRepository;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.repeat.RepeatStatus;
/**
 * Blocks the batch step until the Execution Engine reports a terminal status
 * (COMPLETED or FAILED) for this job's generation, polling the database every
 * 3 seconds. The resulting {@link ExitStatus} is replayed from
 * {@link #afterStep}.
 */
public class ExecutionEngineCallbackTasklet extends BaseExecutionTasklet {

    private final ExecutionEngineGenerationRepository executionEngineGenerationRepository;
    private final EntityManager entityManager;
    // Computed in execute(); returned from afterStep().
    private ExitStatus exitStatus;

    public ExecutionEngineCallbackTasklet(ExecutionEngineGenerationRepository executionEngineGenerationRepository, final EntityManager entityManager) {
        this.executionEngineGenerationRepository = executionEngineGenerationRepository;
        this.entityManager = entityManager;
    }

    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
        final Long jobId = chunkContext.getStepContext().getStepExecution().getJobExecution().getId();
        while (true) {
            // Drop the persistence context so each poll re-reads the current
            // status from the database instead of the first-level cache.
            entityManager.clear();
            Optional<ExitStatus> exitStatusOptional = executionEngineGenerationRepository.findById(jobId)
                    .filter(g -> {
                        ExecutionEngineAnalysisStatus.Status status = g.getAnalysisExecution().getExecutionStatus();
                        return status == COMPLETED || status == FAILED;
                    })
                    .map(this::create);
            if (exitStatusOptional.isPresent()) {
                this.exitStatus = exitStatusOptional.get();
                break;
            }
            // Terminal status not reached yet — wait before polling again.
            Thread.sleep(3000);
        }
        return RepeatStatus.FINISHED;
    }

    /** Maps the terminal analysis status to a batch ExitStatus carrying the exit message. */
    private ExitStatus create(ExecutionEngineGenerationEntity executionEngineGenerationEntity) {
        ExitStatus status = executionEngineGenerationEntity.getAnalysisExecution().getExecutionStatus() == FAILED ?
                ExitStatus.FAILED :
                ExitStatus.COMPLETED;
        return status.addExitDescription(executionEngineGenerationEntity.getExitMessage());
    }

    /** Returns the exit status computed during execute(). */
    @Override
    public ExitStatus afterStep(StepExecution stepExecution) {
        return this.exitStatus;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/plugins/AtlasGisConfig.java | src/main/java/org/ohdsi/webapi/plugins/AtlasGisConfig.java | package org.ohdsi.webapi.plugins;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
/**
 * Activates the Atlas GIS plugin by component-scanning its package, but only
 * when the {@code atlasgis.enabled} property is set to {@code true}.
 */
@Configuration
@ConditionalOnProperty(value = "atlasgis.enabled", havingValue = "true")
@ComponentScan("org.ohdsi.atlasgis")
public class AtlasGisConfig {}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/plugins/PluginsConfigurationInfo.java | src/main/java/org/ohdsi/webapi/plugins/PluginsConfigurationInfo.java | package org.ohdsi.webapi.plugins;
import org.ohdsi.info.ConfigurationInfo;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@Component
public class PluginsConfigurationInfo extends ConfigurationInfo {
private static final String KEY = "plugins";
public PluginsConfigurationInfo(@Value("${atlasgis.enabled}") Boolean atlasgisEnabled) {
properties.put("atlasgisEnabled", atlasgisEnabled);
}
@Override
public String getKey() {
return KEY;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/FeasibilityReport.java | src/main/java/org/ohdsi/webapi/feasibility/FeasibilityReport.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import java.util.List;
import javax.xml.bind.annotation.XmlType;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
public class FeasibilityReport {

    /** Overall match counts for the study population. */
    @XmlType(name="Summary", namespace="http://ohdsi.org/webapi/feasibility")
    public static class Summary {
        public long totalPersons;
        public long matchingPersons;
        // Pre-formatted percentage string (presumably rendered as-is by the client — confirm).
        public String percentMatched;
    }

    /** Per-inclusion-rule counts and percentages. */
    public static class InclusionRuleStatistic
    {
        public int id;
        public String name;
        public String percentExcluded;
        public String percentSatisfying;
        public long countSatisfying;
    }

    public Summary summary;
    public List<InclusionRuleStatistic> inclusionRuleStats;
    // Serialized data for the client-side treemap visualization — TODO confirm exact format.
    public String treemapData;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/TheraputicAreaDeserializer.java | src/main/java/org/ohdsi/webapi/feasibility/TheraputicAreaDeserializer.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonMappingException;
import java.io.IOException;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
public class TheraputicAreaDeserializer extends JsonDeserializer<TheraputicArea> {

    /**
     * Reads the JSON value as an int and resolves it via
     * {@code TheraputicArea.fromId}.
     *
     * @throws JsonMappingException if the id does not map to a TheraputicArea
     */
    @Override
    public TheraputicArea deserialize(JsonParser jp, DeserializationContext dc) throws IOException, JsonProcessingException {
        final int id = jp.getValueAsInt();
        TheraputicArea type = TheraputicArea.fromId(id);
        if (type != null) {
            return type;
        }
        // The previous message ("must be 'one' or 'two'") was copy-pasted from
        // an unrelated deserializer; report the actual offending id instead.
        throw new JsonMappingException("Invalid id for TheraputicArea: " + id);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/PerformFeasibilityTasklet.java | src/main/java/org/ohdsi/webapi/feasibility/PerformFeasibilityTasklet.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.sql.SqlSplit;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfo;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.ohdsi.webapi.util.SessionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionException;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.*;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
public class PerformFeasibilityTasklet implements Tasklet {
private static final Logger log = LoggerFactory.getLogger(PerformFeasibilityTasklet.class);
private final static String CREATE_TEMP_TABLES_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/feasibility/sql/inclusionRuleTable_CREATE.sql");
private final static String DROP_TEMP_TABLES_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/feasibility/sql/inclusionRuleTable_DROP.sql");
private final JdbcTemplate jdbcTemplate;
private final TransactionTemplate transactionTemplate;
private final FeasibilityStudyRepository feasibilityStudyRepository;
private final FeasibilityStudyQueryBuilder studyQueryBuilder;
public PerformFeasibilityTasklet(
final JdbcTemplate jdbcTemplate,
final TransactionTemplate transactionTemplate,
final FeasibilityStudyRepository feasibilityStudyRepository,
final ObjectMapper objectMapper) {
this.jdbcTemplate = jdbcTemplate;
this.transactionTemplate = transactionTemplate;
this.feasibilityStudyRepository = feasibilityStudyRepository;
this.studyQueryBuilder = new FeasibilityStudyQueryBuilder(objectMapper);
}
private StudyGenerationInfo findStudyGenerationInfoBySourceId(Collection<StudyGenerationInfo> infoList, Integer sourceId)
{
for (StudyGenerationInfo info : infoList) {
if (info.getId().getSourceId().equals(sourceId))
return info;
}
return null;
}
private CohortGenerationInfo findCohortGenerationInfoBySourceId(Collection<CohortGenerationInfo> infoList, Integer sourceId)
{
for (CohortGenerationInfo info : infoList) {
if (info.getId().getSourceId().equals(sourceId))
return info;
}
return null;
}
private void prepareTempTables(FeasibilityStudy study, String dialect, String sessionId) {
String translatedSql = SqlTranslate.translateSql(CREATE_TEMP_TABLES_TEMPLATE, dialect, sessionId, null);
String[] sqlStatements = SqlSplit.splitSql(translatedSql);
this.jdbcTemplate.batchUpdate(sqlStatements);
String insSql = "INSERT INTO #inclusionRules (study_id, sequence, name) VALUES (@studyId,@iteration,@ruleName)";
String[] names = new String[]{"studyId", "iteration", "ruleName"};
List<InclusionRule> inclusionRules = study.getInclusionRules();
for (int i = 0; i < inclusionRules.size(); i++) {
InclusionRule r = inclusionRules.get(i);
Object[] values = new Object[]{study.getId(), i, r.getName()};
PreparedStatementRenderer psr = new PreparedStatementRenderer(null, insSql, null, (String) null, names, values, sessionId);
jdbcTemplate.update(psr.getSql(), psr.getSetter());
}
}
private void cleanupTempTables(String dialect, String sessionId) {
String translatedSql = SqlTranslate.translateSql(DROP_TEMP_TABLES_TEMPLATE, dialect, sessionId, null);
String[] sqlStatements = SqlSplit.splitSql(translatedSql);
this.jdbcTemplate.batchUpdate(sqlStatements);
}
private int[] doTask(ChunkContext chunkContext) {
Map<String, Object> jobParams = chunkContext.getStepContext().getJobParameters();
Integer studyId = Integer.valueOf(jobParams.get("study_id").toString());
int[] result;
try {
String sessionId = SessionUtils.sessionId();
FeasibilityStudy study = this.feasibilityStudyRepository.findOne(studyId);
FeasibilityStudyQueryBuilder.BuildExpressionQueryOptions options = new FeasibilityStudyQueryBuilder.BuildExpressionQueryOptions();
options.cdmSchema = jobParams.get("cdm_database_schema").toString();
options.ohdsiSchema = jobParams.get("target_database_schema").toString();
options.cohortTable = jobParams.get("target_database_schema").toString() + "." + jobParams.get("target_table").toString();
if (study.getResultRule() != null) {
prepareTempTables(study, jobParams.get("target_dialect").toString(), sessionId);
String expressionSql = studyQueryBuilder.buildSimulateQuery(study, options);
String translatedSql = SqlTranslate.translateSql(expressionSql, jobParams.get("target_dialect").toString(), sessionId, null);
String[] sqlStatements = SqlSplit.splitSql(translatedSql);
result = PerformFeasibilityTasklet.this.jdbcTemplate.batchUpdate(sqlStatements);
cleanupTempTables(jobParams.get("target_dialect").toString(), sessionId);
} else {
String expressionSql = studyQueryBuilder.buildNullQuery(study, options);
String translatedSql = SqlTranslate.translateSql(expressionSql, jobParams.get("target_dialect").toString(), sessionId, null);
String[] sqlStatements = SqlSplit.splitSql(translatedSql);
result = PerformFeasibilityTasklet.this.jdbcTemplate.batchUpdate(sqlStatements);
}
} catch (Exception e) {
throw new RuntimeException(e);
}
return result;
}
@Override
public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
Date startTime = Calendar.getInstance().getTime();
Map<String, Object> jobParams = chunkContext.getStepContext().getJobParameters();
Integer studyId = Integer.valueOf(jobParams.get("study_id").toString());
Integer sourceId = Integer.valueOf(jobParams.get("source_id").toString());
boolean isValid = false;
DefaultTransactionDefinition requresNewTx = new DefaultTransactionDefinition();
requresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionStatus initStatus = this.transactionTemplate.getTransactionManager().getTransaction(requresNewTx);
FeasibilityStudy study = this.feasibilityStudyRepository.findOne(studyId);
CohortDefinition resultDef = study.getResultRule();
if (resultDef != null) {
CohortGenerationInfo resultInfo = findCohortGenerationInfoBySourceId(resultDef.getGenerationInfoList(), sourceId);
resultInfo.setIsValid(false)
.setStatus(GenerationStatus.RUNNING)
.setStartTime(startTime)
.setExecutionDuration(null);
}
StudyGenerationInfo studyInfo = findStudyGenerationInfoBySourceId(study.getStudyGenerationInfoList(), sourceId);
studyInfo.setIsValid(false);
studyInfo.setStartTime(startTime);
studyInfo.setStatus(GenerationStatus.RUNNING);
this.feasibilityStudyRepository.save(study);
this.transactionTemplate.getTransactionManager().commit(initStatus);
try {
final int[] ret = this.transactionTemplate.execute(new TransactionCallback<int[]>() {
@Override
public int[] doInTransaction(final TransactionStatus status) {
return doTask(chunkContext);
}
});
log.debug("Update count: {}", ret.length);
isValid = true;
} catch (final TransactionException e) {
isValid = false;
log.error(whitelist(e));
throw e;//FAIL job status
}
finally {
TransactionStatus completeStatus = this.transactionTemplate.getTransactionManager().getTransaction(requresNewTx);
Date endTime = Calendar.getInstance().getTime();
study = this.feasibilityStudyRepository.findOne(studyId);
resultDef = study.getResultRule();
if (resultDef != null)
{
CohortGenerationInfo resultInfo = findCohortGenerationInfoBySourceId(resultDef.getGenerationInfoList(), sourceId);
resultInfo.setIsValid(isValid);
resultInfo.setExecutionDuration(new Integer((int)(endTime.getTime() - startTime.getTime())));
resultInfo.setStatus(GenerationStatus.COMPLETE);
}
studyInfo = findStudyGenerationInfoBySourceId(study.getStudyGenerationInfoList(), sourceId);
studyInfo.setIsValid(isValid);
studyInfo.setExecutionDuration(new Integer((int)(endTime.getTime() - startTime.getTime())));
studyInfo.setStatus(GenerationStatus.COMPLETE);
this.feasibilityStudyRepository.save(study);
this.transactionTemplate.getTransactionManager().commit(completeStatus);
}
return RepeatStatus.FINISHED;
}
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/StudyGenerationInfoId.java | src/main/java/org/ohdsi/webapi/feasibility/StudyGenerationInfoId.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Embeddable;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
/**
 * Composite primary key (study_id, source_id) for {@link StudyGenerationInfo}.
 * Embeddable JPA ids must be Serializable and implement equals/hashCode.
 */
@Embeddable
public class StudyGenerationInfoId implements Serializable {

  private static final long serialVersionUID = 1L;

  public StudyGenerationInfoId() {
  }

  public StudyGenerationInfoId(Integer studyId, Integer sourceId) {
    this.studyId = studyId;
    this.sourceId = sourceId;
  }

  @Column(name = "study_id", insertable = false, updatable = false)
  private Integer studyId;

  @Column(name = "source_id")
  private Integer sourceId;

  public Integer getStudyId() {
    return studyId;
  }

  public void setStudyId(Integer studyId) {
    this.studyId = studyId;
  }

  public Integer getSourceId() {
    return sourceId;
  }

  public void setSourceId(Integer sourceId) {
    this.sourceId = sourceId;
  }

  /**
   * Null-safe equality on both key components. The previous implementation
   * dereferenced studyId/sourceId directly and threw NPE for partially
   * initialized ids (e.g. a freshly constructed no-arg instance).
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof StudyGenerationInfoId)) {
      return false;
    }
    StudyGenerationInfoId other = (StudyGenerationInfoId) o;
    return (studyId == null ? other.studyId == null : studyId.equals(other.studyId))
        && (sourceId == null ? other.sourceId == null : sourceId.equals(other.sourceId));
  }

  /** Null-safe 31-based hash, replacing the unboxing sum (NPE-prone, collision-heavy). */
  @Override
  public int hashCode() {
    int result = (studyId == null) ? 0 : studyId.hashCode();
    return 31 * result + ((sourceId == null) ? 0 : sourceId.hashCode());
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/StudyGenerationInfo.java | src/main/java/org/ohdsi/webapi/feasibility/StudyGenerationInfo.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.MapsId;
import javax.persistence.Table;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.source.Source;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
/**
 * JPA entity recording one generation run of a feasibility study against one source:
 * start time, duration, status, validity, and cancellation flag.
 * Keyed by the composite (study_id, source_id) in {@link StudyGenerationInfoId}.
 */
@Entity(name = "StudyGenerationInfo")
@Table(name="feas_study_generation_info")
public class StudyGenerationInfo implements Serializable {

  private static final long serialVersionUID = 1L;

  // Composite key; its studyId/sourceId components are mapped from the
  // associations below via @MapsId.
  @EmbeddedId
  private StudyGenerationInfoId id;

  // Owning study; excluded from JSON to avoid serializing the object graph.
  @JsonIgnore
  @ManyToOne
  @MapsId("studyId")
  @JoinColumn(name="study_id", referencedColumnName="id")
  private FeasibilityStudy study;

  // CDM source this generation ran against; also excluded from JSON.
  @JsonIgnore
  @ManyToOne
  @MapsId("sourceId")
  @JoinColumn(name="source_id", referencedColumnName="source_id")
  private Source source;

  // Serialized as "yyyy-MM-dd HH:mm" for API clients.
  @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm")
  @Column(name="start_time")
  private Date startTime;

  // Elapsed run time in milliseconds; null while a run is in progress.
  @Column(name="execution_duration")
  private Integer executionDuration;

  @Column(name="status")
  private GenerationStatus status;

  // True when the last generation finished without error.
  @Column(name="is_valid")
  private boolean isValid;

  @Column(name = "is_canceled")
  private boolean isCanceled;

  public StudyGenerationInfo()
  {
  }

  public StudyGenerationInfo(FeasibilityStudy study, Source source)
  {
    this.id = new StudyGenerationInfoId(study.getId(), source.getSourceId());
    this.source = source;
    this.study = study;
  }

  public StudyGenerationInfoId getId() {
    return id;
  }

  public void setId(StudyGenerationInfoId id) {
    this.id = id;
  }

  public Date getStartTime() {
    return startTime;
  }

  // Setters below return this to allow fluent chaining.
  public StudyGenerationInfo setStartTime(Date startTime) {
    this.startTime = startTime;
    return this;
  }

  public Integer getExecutionDuration() {
    return executionDuration;
  }

  public StudyGenerationInfo setExecutionDuration(Integer executionDuration) {
    this.executionDuration = executionDuration;
    return this;
  }

  public GenerationStatus getStatus() {
    return status;
  }

  public StudyGenerationInfo setStatus(GenerationStatus status) {
    this.status = status;
    return this;
  }

  public boolean isIsValid() {
    return isValid;
  }

  public StudyGenerationInfo setIsValid(boolean isValid) {
    this.isValid = isValid;
    return this;
  }

  public boolean isCanceled() {
    return isCanceled;
  }

  // Note: unlike the other setters, this one is void (not fluent).
  public void setCanceled(boolean canceled) {
    isCanceled = canceled;
  }

  public FeasibilityStudy getStudy() {
    return study;
  }

  public Source getSource() {
    return source;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudyRepository.java | src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudyRepository.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import org.springframework.data.jpa.repository.EntityGraph;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
/**
 * Spring Data repository for {@link FeasibilityStudy}.
 * The two finders differ only in which named entity graph they eagerly load.
 */
public interface FeasibilityStudyRepository extends CrudRepository<FeasibilityStudy, Integer> {

  /** Loads a study with the "forEdit" graph (inclusion rules) for editing. */
  @EntityGraph(value = "FeasibilityStudy.forEdit", type = EntityGraph.EntityGraphType.LOAD)
  @Query("select fs from FeasibilityStudy fs where id = ?1")
  FeasibilityStudy findOneWithDetail(Integer id);

  /** Loads a study with the "forInfo" graph (generation info list) for status display. */
  @EntityGraph(value = "FeasibilityStudy.forInfo", type = EntityGraph.EntityGraphType.LOAD)
  @Query("select fs from FeasibilityStudy fs where id = ?1")
  FeasibilityStudy findOneWithInfo(Integer id);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/InclusionRule.java | src/main/java/org/ohdsi/webapi/feasibility/InclusionRule.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Lob;
import javax.persistence.Table;
import org.hibernate.annotations.Type;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
/**
 * Embeddable value object for one inclusion rule of a feasibility study:
 * a name, a description, and the rule's criteria expression serialized as JSON text.
 * Stored in the feasibility_inclusion collection table (see FeasibilityStudy).
 */
@Embeddable
public class InclusionRule {

  @Column(name="name")
  private String name;

  @Column(name="description")
  private String description;

  // Criteria group expression as a JSON string; mapped as a CLOB so large
  // expressions are not truncated.
  @Column(name="expression")
  @Lob
  @Type(type = "org.hibernate.type.TextType")
  private String expression;

  public String getName() {
    return name;
  }

  // Fluent setters: each returns this for chaining.
  public InclusionRule setName(String name) {
    this.name = name;
    return this;
  }

  public String getDescription() {
    return description;
  }

  public InclusionRule setDescription(String description) {
    this.description = description;
    return this;
  }

  public String getExpression() {
    return expression;
  }

  public InclusionRule setExpression(String expression) {
    this.expression = expression;
    return this;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/TheraputicArea.java | src/main/java/org/ohdsi/webapi/feasibility/TheraputicArea.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
/**
 * Therapeutic areas a feasibility study can be tagged with.
 * Serialized by Jackson as an object ({ "id": ..., "name": ... }) rather than
 * the constant name, per the Shape.OBJECT format below.
 * (Note: the class name's spelling is part of the public API and kept as-is.)
 */
@JsonFormat(shape=JsonFormat.Shape.OBJECT)
public enum TheraputicArea {
  // JSON form: [{ id: 0, name: 'Cardiovascular & Metabolism' }, { id: 1, name: 'Immunology' }, { id: 2, name: 'Infectious Diseases & Vaccines' }, { id: 3, name: 'Neuroscience' }, { id: 4, name: 'Oncology' }]
  CARDIOVASCULAR_METABOLISM(0,"Cardiovascular & Metabolism"),
  IMMUNOLOGY(1,"Immunology"),
  INFECTIOUSDISEASE_VACCINES(2,"Infectious Diseases & Vaccines"),
  NEUROSCIENCE(3,"Neuroscience"),
  ONCOLOGY(4,"Oncology");

  private final int id;
  private final String name;

  private TheraputicArea(final int id, final String name) {
    this.id = id;
    this.name = name;
  }

  @JsonProperty("id")
  public int getId() {
    return id;
  }

  @JsonProperty("name")
  public String getName() {
    return name;
  }

  /**
   * Gets the TheraputicArea with the given id, or <tt>null</tt> if no constant
   * has that id.
   * @param id numeric id of the therapeutic area
   * @return the matching TheraputicArea, or null
   */
  public static TheraputicArea fromId(final int id) {
    for (TheraputicArea type : TheraputicArea.values()) {
      if (id == type.id) {
        return type;
      }
    }
    return null;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/PerformFeasibilityTask.java | src/main/java/org/ohdsi/webapi/feasibility/PerformFeasibilityTask.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.ohdsi.webapi.feasibility.FeasibilityStudyQueryBuilder.BuildExpressionQueryOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
/**
 * Parameter holder describing a feasibility-study execution request:
 * query-building options plus source/target SQL dialects.
 * toString() renders the task as JSON for logging/diagnostics.
 */
public class PerformFeasibilityTask {

  private static final Logger log = LoggerFactory.getLogger(PerformFeasibilityTask.class);

  // ObjectMapper is thread-safe after configuration; share one instance
  // instead of allocating a new mapper on every toString() call.
  private static final ObjectMapper MAPPER = new ObjectMapper();

  //TODO: Define task-specific parameters
  private BuildExpressionQueryOptions options;
  private String sourceDialect;
  private String targetDialect;

  public BuildExpressionQueryOptions getOptions()
  {
    return this.options;
  }

  // Fluent setters: each returns this for chaining.
  public PerformFeasibilityTask setOptions(BuildExpressionQueryOptions options)
  {
    this.options = options;
    return this;
  }

  public String getSourceDialect() {
    return sourceDialect;
  }

  public PerformFeasibilityTask setSourceDialect(String sourceDialect) {
    this.sourceDialect = sourceDialect;
    return this;
  }

  public String getTargetDialect() {
    return targetDialect;
  }

  public PerformFeasibilityTask setTargetDialect(String targetDialect) {
    this.targetDialect = targetDialect;
    return this;
  }

  /**
   * JSON representation of this task; falls back to Object.toString()
   * (logging the serialization error) if Jackson fails.
   */
  @Override
  public String toString() {
    try {
      return MAPPER.writeValueAsString(this);
    } catch (Exception e) {
      log.error(whitelist(e));
    }
    return super.toString();
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudy.java | src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudy.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedAttributeNode;
import javax.persistence.NamedEntityGraph;
import javax.persistence.NamedEntityGraphs;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.OrderColumn;
import javax.persistence.Table;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
@Entity(name = "FeasibilityStudy")
@Table(name="feasibility_study")
@NamedEntityGraphs({
@NamedEntityGraph(
name = "FeasibilityStudy.forEdit",
attributeNodes = {
@NamedAttributeNode(value = "inclusionRules"),
}
),
@NamedEntityGraph(
name = "FeasibilityStudy.forInfo",
attributeNodes = {
@NamedAttributeNode(value = "studyGenerationInfoList")
}
)
})
public class FeasibilityStudy {
@Id
@GenericGenerator(
name = "feasibility_study_generator",
strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
parameters = {
@Parameter(name = "sequence_name", value = "feasibility_study_sequence"),
@Parameter(name = "increment_size", value = "1")
}
)
@GeneratedValue(generator = "feasibility_study_generator")
@Column(name="id")
@Access(AccessType.PROPERTY)
private Integer id;
@Column(name="name")
private String name;
@Column(name="description")
private String description;
@OneToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL)
@JoinColumn(name="index_def_id")
private CohortDefinition indexRule;
@OneToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL)
@JoinColumn(name="result_def_id")
private CohortDefinition resultRule;
@OneToMany(fetch= FetchType.LAZY, cascade = CascadeType.ALL, mappedBy = "study", orphanRemoval=true)
private Set<StudyGenerationInfo> studyGenerationInfoList = new HashSet<StudyGenerationInfo>();
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "created_by_id")
private UserEntity createdBy;
@Column(name="created_date")
private Date createdDate;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "modified_by_id")
private UserEntity modifiedBy;
@Column(name="modified_date")
private Date modifiedDate;
@ElementCollection
@CollectionTable(name = "feasibility_inclusion", joinColumns = @JoinColumn(name = "study_id"))
@OrderColumn(name="sequence")
private List<InclusionRule> inclusionRules = new ArrayList<InclusionRule>();
public Integer getId() {
return id;
}
public FeasibilityStudy setId(Integer id) {
this.id = id;
return this;
}
public String getName() {
return name;
}
public FeasibilityStudy setName(String name) {
this.name = name;
return this;
}
public String getDescription() {
return description;
}
public FeasibilityStudy setDescription(String description) {
this.description = description;
return this;
}
public CohortDefinition getIndexRule() {
return indexRule;
}
public FeasibilityStudy setIndexRule(CohortDefinition indexRule) {
this.indexRule = indexRule;
return this;
}
public CohortDefinition getResultRule() {
return resultRule;
}
public FeasibilityStudy setResultRule(CohortDefinition resultRule) {
this.resultRule = resultRule;
return this;
}
public Set<StudyGenerationInfo> getStudyGenerationInfoList() {
return studyGenerationInfoList;
}
public FeasibilityStudy setStudyGenerationInfoList(Set<StudyGenerationInfo> studyGenerationInfoList) {
this.studyGenerationInfoList = studyGenerationInfoList;
return this;
}
public UserEntity getCreatedBy() {
return createdBy;
}
public FeasibilityStudy setCreatedBy(UserEntity createdBy) {
this.createdBy = createdBy;
return this;
}
public Date getCreatedDate() {
return createdDate;
}
public FeasibilityStudy setCreatedDate(Date createdDate) {
this.createdDate = createdDate;
return this;
}
public UserEntity getModifiedBy() {
return modifiedBy;
}
public FeasibilityStudy setModifiedBy(UserEntity modifiedBy) {
this.modifiedBy = modifiedBy;
return this;
}
public Date getModifiedDate() {
return modifiedDate;
}
public FeasibilityStudy setModifiedDate(Date modifiedDate) {
this.modifiedDate = modifiedDate;
return this;
}
public List<InclusionRule> getInclusionRules() {
return inclusionRules;
}
public FeasibilityStudy setInclusionRules(List<InclusionRule> inclusionRules) {
this.inclusionRules = inclusionRules;
return this;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudyQueryBuilder.java | src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudyQueryBuilder.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.feasibility;
import static org.ohdsi.webapi.Constants.SqlSchemaPlaceholders.CDM_DATABASE_SCHEMA_PLACEHOLDER;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.circe.cohortdefinition.CohortExpression;
import org.ohdsi.circe.cohortdefinition.CohortExpressionQueryBuilder;
import org.ohdsi.circe.cohortdefinition.CriteriaGroup;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.circe.vocabulary.ConceptSetExpressionQueryBuilder;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
public class FeasibilityStudyQueryBuilder {
private final static ConceptSetExpressionQueryBuilder conceptSetQueryBuilder = new ConceptSetExpressionQueryBuilder();
private final static CohortExpressionQueryBuilder cohortExpressionQueryBuilder = new CohortExpressionQueryBuilder();
private final static String PERFORM_FEASIBILITY_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/feasibility/sql/performFeasibilityStudy.sql");
private final static String PERFORM_NULL_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/feasibility/sql/nullStudy.sql");
private final static String INDEX_COHORT_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/feasibility/sql/indexCohort.sql");
private final static String INCLUSION_RULE_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/feasibility/sql/inclusionrule.sql");
  /**
   * Schema/table targets substituted into the generated feasibility SQL.
   * Plain public fields populated by callers (see PerformFeasibilityTasklet.doTask).
   */
  public static class BuildExpressionQueryOptions {
    // CDM schema substituted for the @cdm_database_schema placeholder.
    @JsonProperty("cdmSchema")
    public String cdmSchema;

    // Results/OHDSI schema substituted for @ohdsi_database_schema.
    @JsonProperty("ohdsiSchema")
    public String ohdsiSchema;

    // Fully qualified cohort table (schema.table) substituted for @cohortTable.
    @JsonProperty("cohortTable")
    public String cohortTable;
  }
private ObjectMapper objectMapper;
public FeasibilityStudyQueryBuilder(ObjectMapper objectMapper) {
this.objectMapper = objectMapper;
}
private String getInclusionRuleInserts(FeasibilityStudy study)
{
String insertTemplate = "insert into #inclusionRules values (%d, %d, %s)\n";
StringBuilder insertStatements = new StringBuilder();
List<InclusionRule> inclusionRules = study.getInclusionRules();
for (int i = 0; i< inclusionRules.size(); i++)
{
InclusionRule r = inclusionRules.get(i);
insertStatements.append(String.format(insertTemplate, study.getId(), i, r.getName()));
}
return insertStatements.toString();
}
private String getInclusionRuleQuery(CriteriaGroup inclusionRule)
{
String resultSql = INCLUSION_RULE_QUERY_TEMPLATE;
String additionalCriteriaQuery = "\nJOIN (\n" + cohortExpressionQueryBuilder.getCriteriaGroupQuery(inclusionRule, "#primary_events") + ") AC on AC.event_id = pe.event_id";
additionalCriteriaQuery = StringUtils.replace(additionalCriteriaQuery,"@indexId", "" + 0);
resultSql = StringUtils.replace(resultSql, "@additionalCriteriaQuery", additionalCriteriaQuery);
return resultSql;
}
  /**
   * Builds the full SQL that runs a feasibility study: the concept-set (codeset) query,
   * the index cohort query, and one sub-query per inclusion rule are substituted into
   * the performFeasibilityStudy.sql template.
   *
   * @param study   study whose index/inclusion rules are stored as JSON expressions
   * @param options optional schema/table names; when null, the @-placeholders for
   *                schemas and the cohort table are left in the SQL
   * @return rendered SQL with study-specific values substituted
   * @throws RuntimeException if any stored expression fails to deserialize
   */
  public String buildSimulateQuery(FeasibilityStudy study, BuildExpressionQueryOptions options) {
    String resultSql = PERFORM_FEASIBILITY_QUERY_TEMPLATE;
    CohortExpression indexRule;
    ArrayList<CriteriaGroup> inclusionRules = new ArrayList<>();
    try
    {
      // Deserialize everything up-front so a malformed expression fails fast,
      // before any SQL assembly happens.
      indexRule = objectMapper.readValue(study.getIndexRule().getDetails().getExpression(), CohortExpression.class);
      for (InclusionRule inclusionRule : study.getInclusionRules())
      {
        inclusionRules.add(objectMapper.readValue(inclusionRule.getExpression(), CriteriaGroup.class));
      }
    }
    catch (Exception e)
    {
      throw new RuntimeException(e);
    }
    // everything deserialized successfully
    String codesetQuery = cohortExpressionQueryBuilder.getCodesetQuery(indexRule.conceptSets);
    resultSql = StringUtils.replace(resultSql, "@codesetQuery", codesetQuery);
    String indexCohortQuery = INDEX_COHORT_QUERY_TEMPLATE;
    indexCohortQuery = StringUtils.replace(indexCohortQuery, "@indexCohortId", "" + study.getIndexRule().getId());
    resultSql = StringUtils.replace(resultSql, "@indexCohortQuery", indexCohortQuery);
    ArrayList<String> inclusionRuleInserts = new ArrayList<>();
    for (int i = 0; i < inclusionRules.size(); i++)
    {
      CriteriaGroup cg = inclusionRules.get(i);
      String inclusionRuleInsert = getInclusionRuleQuery(cg);
      // Tag each rule's query with its 0-based ordinal.
      inclusionRuleInsert = StringUtils.replace(inclusionRuleInsert, "@inclusion_rule_id", "" + i);
      inclusionRuleInserts.add(inclusionRuleInsert);
    }
    resultSql = StringUtils.replace(resultSql,"@inclusionCohortInserts", StringUtils.join(inclusionRuleInserts,"\n"));
    if (options != null)
    {
      // replace query parameters with tokens
      resultSql = StringUtils.replace(resultSql, CDM_DATABASE_SCHEMA_PLACEHOLDER, options.cdmSchema);
      resultSql = StringUtils.replace(resultSql, "@ohdsi_database_schema", options.ohdsiSchema);
      resultSql = StringUtils.replace(resultSql, "@cohortTable", options.cohortTable);
    }
    resultSql = StringUtils.replace(resultSql, "@resultCohortId", study.getResultRule().getId().toString());
    resultSql = StringUtils.replace(resultSql, "@studyId", study.getId().toString());
    return resultSql;
  }
  /**
   * Builds the SQL for the "null study" (baseline) run from the nullStudy.sql template.
   *
   * @param study   study providing the index cohort id and study id
   * @param options optional schema/table names; when null, the @-placeholders for
   *                schemas and the cohort table are left in the SQL
   * @return rendered SQL with study-specific values substituted
   */
  public String buildNullQuery(FeasibilityStudy study, BuildExpressionQueryOptions options)
  {
    String resultSql = PERFORM_NULL_QUERY_TEMPLATE;
    if (options != null)
    {
      // replace query parameters with tokens
      resultSql = StringUtils.replace(resultSql, CDM_DATABASE_SCHEMA_PLACEHOLDER, options.cdmSchema);
      resultSql = StringUtils.replace(resultSql, "@ohdsi_database_schema", options.ohdsiSchema);
      resultSql = StringUtils.replace(resultSql, "@cohortTable", options.cohortTable);
    }
    resultSql = StringUtils.replace(resultSql, "@indexCohortId", "" + study.getIndexRule().getId());
    resultSql = StringUtils.replace(resultSql, "@studyId", study.getId().toString());
    return resultSql;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/SourceMapKey.java | src/main/java/org/ohdsi/webapi/common/SourceMapKey.java | package org.ohdsi.webapi.common;
import org.ohdsi.webapi.source.Source;
import java.util.function.Function;
/**
 * Strategy object describing how {@code Source} entities are keyed when building maps:
 * either by the textual source key or by the numeric source id. Instances are created
 * only via the two constants below.
 *
 * @param <T> type of the map key extracted from a {@code Source}
 */
public final class SourceMapKey<T> {

    /** Keys sources by their source-key string. */
    public static final SourceMapKey<String> BY_SOURCE_KEY = new SourceMapKey<>(Source::getSourceKey);

    /** Keys sources by their numeric source id. */
    public static final SourceMapKey<Integer> BY_SOURCE_ID = new SourceMapKey<>(Source::getSourceId);

    // Immutable key-extraction function; 'final' guarantees safe publication of the constants.
    private final Function<Source, T> keyFunc;

    private SourceMapKey(Function<Source, T> keyFunc) {
        this.keyFunc = keyFunc;
    }

    /** @return the function extracting this key from a {@code Source} */
    public Function<Source, T> getKeyFunc() {
        return keyFunc;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/CommonConceptSetEntity.java | src/main/java/org/ohdsi/webapi/common/CommonConceptSetEntity.java | package org.ohdsi.webapi.common;
import org.hibernate.annotations.Type;
import org.ohdsi.analysis.Utils;
import org.ohdsi.circe.cohortdefinition.ConceptSet;
import javax.persistence.Column;
import javax.persistence.Lob;
import javax.persistence.MappedSuperclass;
import java.util.List;
import java.util.Objects;
@MappedSuperclass
public class CommonConceptSetEntity {

  // Raw JSON of the concept-set list, persisted as-is in the "expression" column.
  @Lob
  @Column(name = "expression")
  @Type(type = "org.hibernate.type.TextType")
  private String rawExpression;

  public String getRawExpression() {
    return rawExpression;
  }

  public void setRawExpression(String rawExpression) {
    this.rawExpression = rawExpression;
  }

  /**
   * Deserializes the stored JSON into a list of Circe {@link ConceptSet}s,
   * or returns null when no expression has been stored.
   */
  public List<ConceptSet> getConceptSets() {
    return Objects.nonNull(this.rawExpression) ?
        Utils.deserialize(this.rawExpression, typeFactory -> typeFactory.constructCollectionType(List.class, ConceptSet.class)) : null;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/DesignImportService.java | src/main/java/org/ohdsi/webapi/common/DesignImportService.java | package org.ohdsi.webapi.common;
import java.util.Arrays;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetails;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetailsRepository;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.conceptset.ConceptSetRepository;
import org.ohdsi.webapi.service.CohortDefinitionService;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.shiro.management.Security;
import org.ohdsi.webapi.util.NameUtils;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import javax.cache.Cache;
import javax.cache.CacheManager;
import org.ohdsi.webapi.analysis.AnalysisConceptSet;
import org.ohdsi.webapi.conceptset.ConceptSetItem;
import org.ohdsi.webapi.service.ConceptSetService;
import org.ohdsi.webapi.service.dto.ConceptSetDTO;
import org.ohdsi.webapi.shiro.PermissionManager;
import org.springframework.core.convert.ConversionService;
@Service
public class DesignImportService {

  // Collaborators injected via the constructor.
  private final Security security;
  private final UserRepository userRepository;
  private final CohortDefinitionRepository cohortRepository;
  private final CohortDefinitionDetailsRepository detailsRepository;
  private final ConversionService conversionService;
  private final ConceptSetService conceptSetService;
  private final CohortDefinitionService cohortDefinitionService;
  // May be null when caching is disabled (see @Nullable on the constructor parameter).
  private final CacheManager cacheManager;

  public DesignImportService(Security security, UserRepository userRepository, CohortDefinitionRepository cohortRepository,
                             CohortDefinitionDetailsRepository detailsRepository, ConceptSetService conceptSetService,
                             ConversionService conversionService, CohortDefinitionService cohortDefinitionService,
                             @Nullable CacheManager cacheManager) {
    this.security = security;
    this.userRepository = userRepository;
    this.cohortRepository = cohortRepository;
    this.detailsRepository = detailsRepository;
    this.conceptSetService = conceptSetService;
    this.conversionService = conversionService;
    this.cohortDefinitionService = cohortDefinitionService;
    this.cacheManager = cacheManager;
  }

  /**
   * Persists an imported concept set and its items. The name is suffixed if a concept
   * set with the same name already exists.
   *
   * @param analysisConceptSet imported concept set (name + expression items)
   * @return the persisted concept set DTO (with its generated id)
   */
  public ConceptSetDTO persistConceptSet(final AnalysisConceptSet analysisConceptSet) {
    ConceptSetDTO cs = conversionService.convert(analysisConceptSet, ConceptSetDTO.class);
    // Disambiguate the name against existing concept sets (e.g. "name (1)").
    cs.setName(NameUtils.getNameWithSuffix(cs.getName(), this::getCsNamesLike));
    cs = conceptSetService.createConceptSet(cs);
    final Integer conceptSetId = cs.getId();
    List<ConceptSetItem> csi = Arrays.stream(analysisConceptSet.expression.items).map(i -> conversionService.convert(i, ConceptSetItem.class)).collect(Collectors.toList());
    csi.forEach(n -> n.setConceptSetId(conceptSetId));
    conceptSetService.saveConceptSetItems(cs.getId(), csi.stream().toArray(ConceptSetItem[]::new));
    return cs;
  }

  /** Convenience overload: match existing cohorts by expression hash only, ignoring the name. */
  public CohortDefinition persistCohortOrGetExisting(final CohortDefinition cohort) {
    return this.persistCohortOrGetExisting(cohort, false);
  }

  /**
   * Returns an existing cohort definition whose expression (and, optionally, name)
   * matches the given one; otherwise persists the given cohort as a new definition
   * owned by the current user, with a name suffixed to avoid collisions.
   *
   * @param cohort cohort to persist (its id is cleared before saving)
   * @param includeCohortNameInComparison when true, an existing cohort only matches
   *        if its name is equal as well
   */
  public CohortDefinition persistCohortOrGetExisting(final CohortDefinition cohort, final Boolean includeCohortNameInComparison) {
    final CohortDefinitionDetails details = cohort.getDetails();
    Optional<CohortDefinition> findCohortResult = includeCohortNameInComparison ? this.findCohortByExpressionHashcodeAndName(details, cohort.getName()) : this.findCohortByExpressionHashcode(details);
    return findCohortResult.orElseGet(() -> {
      final UserEntity user = userRepository.findByLogin(security.getSubject());
      cohort.setId(null); // force an insert rather than an update
      cohort.setCreatedBy(user);
      cohort.setCreatedDate(new Date());
      cohort.setDetails(details);
      details.setCohortDefinition(cohort);
      cohort.setName(NameUtils.getNameWithSuffix(cohort.getName(), this::getCdNamesLike));
      final CohortDefinition savedCohort = cohortRepository.save(cohort);
      detailsRepository.save(details);
      // if this is new, we will need to decache the cohort definition list
      if (this.cacheManager != null) {
        Cache cohortDefCache = cacheManager.getCache(CohortDefinitionService.CachingSetup.COHORT_DEFINITION_LIST_CACHE);
        if (cohortDefCache != null) {
          cohortDefCache.clear(); // wipes all entries in cohort definition list cache cache
        }
      }
      // permission caching is handled via the EntityInsertEventListener and EntityPermissionSchema.onInsert
      return savedCohort;
    });
  }

  // Name-collision lookups used by NameUtils.getNameWithSuffix.
  private List<String> getCsNamesLike(String name) {
    return conceptSetService.getNamesLike(name);
  }

  private List<String> getCdNamesLike(String name) {
    return cohortDefinitionService.getNamesLike(name);
  }

  private Optional<CohortDefinition> findCohortByExpressionHashcode(final CohortDefinitionDetails details) {
    return this.findCohortByExpressionHashcodeAndPredicate(details, (c -> true));
  }

  private Optional<CohortDefinition> findCohortByExpressionHashcodeAndName(final CohortDefinitionDetails details, final String cohortName) {
    return this.findCohortByExpressionHashcodeAndPredicate(details, c -> Objects.equals(c.getName(), cohortName));
  }

  /**
   * Finds a cohort whose expression matches: candidates are pre-filtered by expression
   * hash code, then compared by standardized expression (hash collisions), then by the
   * extra predicate.
   */
  private Optional<CohortDefinition> findCohortByExpressionHashcodeAndPredicate(final CohortDefinitionDetails details, final Predicate<CohortDefinition> c) {
    List<CohortDefinitionDetails> detailsFromDb = detailsRepository.findByHashCode(details.calculateHashCode());
    return detailsFromDb
        .stream()
        .filter(v -> Objects.equals(v.getStandardizedExpression(), details.getStandardizedExpression()))
        .map(CohortDefinitionDetails::getCohortDefinition)
        .filter(c)
        .findFirst();
  }
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/OptionDTO.java | src/main/java/org/ohdsi/webapi/common/OptionDTO.java | package org.ohdsi.webapi.common;
/**
 * Simple id/name pair transferred to clients for option lists.
 */
public class OptionDTO {

  private String id;
  private String name;

  /** No-arg constructor. */
  public OptionDTO() {
  }

  public OptionDTO(String id, String name) {
    this.id = id;
    this.name = name;
  }

  /** @return the option identifier */
  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  /** @return the human-readable option label */
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/AnalysisGenerationBaseInfo.java | src/main/java/org/ohdsi/webapi/common/generation/AnalysisGenerationBaseInfo.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import javax.persistence.*;
@Embeddable
@MappedSuperclass
public class AnalysisGenerationBaseInfo {

  // Hash of the analysis design captured at generation time
  // (presumably used to match a generation to its design version — confirm in callers).
  @Column(name = "hash_code")
  protected Integer hashCode;

  // User who started the generation; lazily loaded and never updated afterwards.
  @ManyToOne(fetch = FetchType.LAZY)
  @JoinColumn(name = "created_by_id", updatable = false)
  protected UserEntity createdBy;

  public Integer getHashCode() {
    return hashCode;
  }

  public UserEntity getCreatedBy() {
    return createdBy;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/AnalysisExecutionSupport.java | src/main/java/org/ohdsi/webapi/common/generation/AnalysisExecutionSupport.java | package org.ohdsi.webapi.common.generation;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.analysis.Utils;
import org.ohdsi.hydra.Hydra;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.webapi.executionengine.entity.AnalysisFile;
import org.ohdsi.webapi.executionengine.util.StringGenerationUtil;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.ohdsi.webapi.source.Source;
import org.springframework.batch.core.JobParametersBuilder;
import java.io.OutputStream;
import static org.ohdsi.webapi.Constants.Params.*;
/**
 * Shared plumbing for analyses that run as packaged R executions on the execution engine:
 * builds the entry-point script, the common job parameters, and the hydrated study package.
 */
public abstract class AnalysisExecutionSupport extends AbstractDaoService {

  /**
   * Creates the "runAnalysis.R" entry-point file by rendering the subclass-supplied
   * R script template with the package file, package name and analysis directory.
   *
   * @param analysisId used to name the per-analysis working directory ("analysis_&lt;id&gt;")
   */
  protected AnalysisFile prepareAnalysisExecution(String packageName, String packageFilename, Number analysisId) {
    AnalysisFile execFile = new AnalysisFile();
    execFile.setFileName("runAnalysis.R");
    String[] paramNames = {"packageFile", "packageName", "analysisDir"};
    String[] paramValues = {packageFilename, packageName, String.format("analysis_%d", analysisId)};
    // renderSql is used to replace template params with actual values in the R script template
    String script = SqlRender.renderSql(getExecutionScript(), paramNames, paramValues);
    // NOTE(review): getBytes() uses the platform default charset — confirm whether
    // UTF-8 should be forced for the generated script.
    execFile.setContents(script.getBytes());
    return execFile;
  }

  /**
   * Builds the job parameters common to all execution-engine analysis jobs.
   *
   * NOTE(review): analysisId is currently unused here — confirm whether it should be
   * added to the job parameters.
   */
  protected JobParametersBuilder prepareJobParametersBuilder(Source source, Integer analysisId, String packageName, String packageFilename) {
    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString(SOURCE_ID, String.valueOf(source.getSourceId()));
    builder.addString(UPDATE_PASSWORD, StringGenerationUtil.generateRandomString());
    builder.addString(JOB_AUTHOR, getCurrentUserLogin());
    builder.addString(PACKAGE_NAME, packageName);
    builder.addString(PACKAGE_FILE_NAME, packageFilename);
    builder.addString(EXECUTABLE_FILE_NAME, "runAnalysis.R");
    return builder;
  }

  /**
   * Serializes the analysis design and feeds it to Hydra, writing the hydrated
   * study package to the given stream.
   */
  protected void hydrateAnalysis(Object analysis, OutputStream out) throws JsonProcessingException {
    String studySpecs = Utils.serialize(analysis, true);
    Hydra h = new Hydra(studySpecs);
    h.hydrate(out);
  }

  /** @return the R script template rendered by {@link #prepareAnalysisExecution}. */
  protected abstract String getExecutionScript();
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/CommonGenerationToDtoConverter.java | src/main/java/org/ohdsi/webapi/common/generation/CommonGenerationToDtoConverter.java | package org.ohdsi.webapi.common.generation;
import org.springframework.stereotype.Component;
@Component
public class CommonGenerationToDtoConverter extends BaseCommonGenerationToDtoConverter<CommonGeneration, CommonGenerationDTO> {

  /** Supplies the concrete DTO instance that the base converter populates. */
  @Override
  protected CommonGenerationDTO createResultObject() {
    return new CommonGenerationDTO();
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/AnalysisGenerationInfo.java | src/main/java/org/ohdsi/webapi/common/generation/AnalysisGenerationInfo.java | package org.ohdsi.webapi.common.generation;
import javax.persistence.Column;
import javax.persistence.Embeddable;
@Embeddable
public class AnalysisGenerationInfo extends AnalysisGenerationBaseInfo {

  // Serialized analysis design captured at generation time.
  @Column(name = "design")
  protected String design;

  public String getDesign() {
    return design;
  }
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/AnalysisTasklet.java | src/main/java/org/ohdsi/webapi/common/generation/AnalysisTasklet.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.cohortcharacterization.repository.AnalysisGenerationInfoEntityRepository;
import org.ohdsi.webapi.exception.AtlasException;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.util.CancelableJdbcTemplate;
import org.slf4j.Logger;
import org.springframework.batch.core.step.tasklet.StoppableTasklet;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;
/**
 * Base tasklet for analysis generations that must record the generation design/author
 * in the database independently of the batch step's own transaction.
 */
public abstract class AnalysisTasklet extends CancelableTasklet implements StoppableTasklet {

  protected final AnalysisGenerationInfoEntityRepository analysisGenerationInfoEntityRepository;

  public AnalysisTasklet(Logger log,
                         CancelableJdbcTemplate jdbcTemplate,
                         TransactionTemplate transactionTemplate,
                         AnalysisGenerationInfoEntityRepository analysisGenerationInfoEntityRepository) {
    super(log, jdbcTemplate, transactionTemplate);
    this.analysisGenerationInfoEntityRepository = analysisGenerationInfoEntityRepository;
  }

  /**
   * Persists the generation info (design + author) in its own REQUIRES_NEW transaction
   * so it is committed regardless of the surrounding batch transaction's outcome.
   *
   * @throws AtlasException if the info cannot be saved; the original failure is the cause
   */
  protected void saveInfoWithinTheSeparateTransaction(Long jobId, String serializedDesign, UserEntity userEntity) {
    DefaultTransactionDefinition txDefinition = new DefaultTransactionDefinition();
    txDefinition.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    TransactionStatus infoSaveTx = null;
    try {
      infoSaveTx = this.transactionTemplate.getTransactionManager().getTransaction(txDefinition);
      saveInfo(jobId, serializedDesign, userEntity);
      this.transactionTemplate.getTransactionManager().commit(infoSaveTx);
    } catch (Exception ex) {
      log.error("Cannot save sourceInfo for the job: {} ", jobId, ex);
      // Roll back only if a transaction was actually started and is still active;
      // otherwise rollback(null) / rollback-after-commit would throw and mask 'ex'.
      if (infoSaveTx != null && !infoSaveTx.isCompleted()) {
        this.transactionTemplate.getTransactionManager().rollback(infoSaveTx);
      }
      throw new AtlasException(ex);
    }
  }

  // Builds and saves the generation-info entity; runs inside the REQUIRES_NEW transaction above.
  private void saveInfo(Long jobId, String serializedDesign, UserEntity userEntity) {
    AnalysisGenerationInfoEntity generationInfoEntity = new AnalysisGenerationInfoEntity();
    generationInfoEntity.setId(jobId);
    generationInfoEntity.setDesign(serializedDesign);
    generationInfoEntity.setCreatedBy(userEntity);
    analysisGenerationInfoEntityRepository.save(generationInfoEntity);
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/ExecutionEngineGenerationEntityToDtoConverter.java | src/main/java/org/ohdsi/webapi/common/generation/ExecutionEngineGenerationEntityToDtoConverter.java | package org.ohdsi.webapi.common.generation;
import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineGenerationEntity;
import java.util.Objects;
/**
 * Converts execution-engine-backed generation entities to DTOs, adding the execution
 * status and the number of produced result files on top of the common generation fields.
 *
 * @param <T> concrete execution-engine generation entity type
 */
public class ExecutionEngineGenerationEntityToDtoConverter<T extends ExecutionEngineGenerationEntity> extends BaseCommonGenerationToDtoConverter<T, ExecutionBasedGenerationDTO> {

  @Override
  protected ExecutionBasedGenerationDTO createResultObject() {
    return new ExecutionBasedGenerationDTO();
  }

  @Override
  public ExecutionBasedGenerationDTO convert(T source) {
    ExecutionBasedGenerationDTO dto = super.convert(source);
    // Null checks use Objects.nonNull consistently (the entity may have no execution yet).
    if (nonNull(source.getAnalysisExecution()) && nonNull(source.getAnalysisExecution().getExecutionStatus())) {
      dto.setStatus(source.getAnalysisExecution().getExecutionStatus().name());
    }
    if (nonNull(source.getAnalysisExecution()) && nonNull(source.getAnalysisExecution().getResultFiles())) {
      dto.setNumResultFiles(source.getAnalysisExecution().getResultFiles().size());
    } else {
      dto.setNumResultFiles(0);
    }
    return dto;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/BaseCommonGenerationToDtoConverter.java | src/main/java/org/ohdsi/webapi/common/generation/BaseCommonGenerationToDtoConverter.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
public abstract class BaseCommonGenerationToDtoConverter<E extends CommonGeneration, D extends CommonGenerationDTO> extends BaseConversionServiceAwareConverter<E, D> {

  /**
   * Copies the fields shared by all generation entities onto the DTO.
   * Subclasses supply the concrete DTO (createResultObject) and may extend convert().
   */
  @Override
  public D convert(final E source) {
    // createResultObject(source) comes from BaseConversionServiceAwareConverter;
    // presumably it delegates to the no-arg createResultObject() — confirm in the base class.
    final D resultObject = createResultObject(source);
    resultObject.setId(source.getId());
    resultObject.setStatus(source.getStatus());
    resultObject.setSourceKey(source.getSource().getSourceKey());
    resultObject.setHashCode(source.getHashCode());
    resultObject.setStartTime(source.getStartTime());
    resultObject.setEndTime(source.getEndTime());
    resultObject.setExitMessage(source.getExitMessage());
    return resultObject;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/StoppableTransactionalTasklet.java | src/main/java/org/ohdsi/webapi/common/generation/StoppableTransactionalTasklet.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.Constants;
import org.slf4j.Logger;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.StoppableTasklet;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.concurrent.FutureTask;
/**
 * Transactional tasklet that can be stopped by user request: it polls a background
 * task and, once stopped, cancels the task and reports a CANCELED exit status.
 */
public abstract class StoppableTransactionalTasklet<T> extends TransactionalTasklet<T> implements StoppableTasklet {

  /** Polling period, in milliseconds, used while waiting for the async task. */
  protected final long checkInterval = 1000L;

  // Set from another thread via stop(); volatile for cross-thread visibility.
  private volatile boolean stopped = false;

  public StoppableTransactionalTasklet(Logger log, TransactionTemplate transactionTemplate) {
    super(log, transactionTemplate);
  }

  /**
   * Polls the given task until it completes or this tasklet is stopped.
   *
   * @return the task result, or null when the task was cancelled due to a stop request
   * @throws RuntimeException wrapping any task failure not attributable to cancellation
   */
  protected int[] waitForFuture(FutureTask<int[]> futureTask) {
    try {
      while (true) {
        Thread.sleep(checkInterval);
        if (futureTask.isDone()) {
          return futureTask.get();
        } else if (isStopped()) {
          futureTask.cancel(true);
          return null;
        }
      }
    } catch (InterruptedException e) {
      // Restore the interrupt flag so callers up the stack can observe the interruption.
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
    } catch (Exception e) {
      if (isStopped() && e.getCause() instanceof DataAccessResourceFailureException) {
        // The statement was cancelled on purpose; treat the resulting failure as a clean stop.
        return null;
      }
      throw new RuntimeException(e);
    }
  }

  protected boolean isStopped() {
    return stopped;
  }

  /** Marks the step CANCELED when a stop was requested. */
  @Override
  protected void doAfter(StepContribution stepContribution, ChunkContext chunkContext) {
    if (isStopped()) {
      stepContribution.setExitStatus(new ExitStatus(Constants.CANCELED, "Canceled by user request"));
    }
  }

  @Override
  public void stop() {
    this.stopped = true;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/CancelJobListener.java | src/main/java/org/ohdsi/webapi/common/generation/CancelJobListener.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.Constants;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.listener.JobExecutionListenerSupport;
import java.util.Objects;
public class CancelJobListener extends JobExecutionListenerSupport {

  /**
   * After the job finishes, marks the whole job CANCELED when any of its steps
   * exited with the CANCELED exit code.
   */
  @Override
  public void afterJob(JobExecution jobExecution) {
    boolean anyStepCanceled = jobExecution.getStepExecutions().stream()
        .map(stepExecution -> stepExecution.getExitStatus().getExitCode())
        .anyMatch(exitCode -> Objects.equals(Constants.CANCELED, exitCode));
    if (anyStepCanceled) {
      jobExecution.setExitStatus(new ExitStatus(Constants.CANCELED, "Canceled by user request"));
    }
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/CommonGeneration.java | src/main/java/org/ohdsi/webapi/common/generation/CommonGeneration.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.source.Source;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.MappedSuperclass;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import java.util.Date;
@MappedSuperclass
public abstract class CommonGeneration {

  // Id assigned externally (no @GeneratedValue on this mapping).
  @Id
  @Column
  protected Long id;

  // CDM source this generation ran against; lazily loaded.
  @ManyToOne(fetch = FetchType.LAZY)
  @JoinColumn(name = "source_id")
  protected Source source;

  @Column(name = "start_time")
  @Temporal(TemporalType.TIMESTAMP)
  protected Date startTime;

  @Column(name = "end_time")
  @Temporal(TemporalType.TIMESTAMP)
  protected Date endTime;

  @Column
  protected String status;

  @Column(name = "exit_message")
  private String exitMessage;

  // Embedded design hash + author info shared by all generation types.
  @Embedded
  protected AnalysisGenerationBaseInfo info;

  public Long getId() {
    return id;
  }

  public Source getSource() {
    return source;
  }

  public Date getStartTime() {
    return startTime;
  }

  public Date getEndTime() {
    return endTime;
  }

  public String getStatus() {
    return status;
  }

  public String getExitMessage() {
    return exitMessage;
  }

  // Null-safe delegation: 'info' may be absent for older rows.
  public Integer getHashCode() {
    return this.info != null ? this.info.getHashCode() : null;
  }

  public UserEntity getCreatedBy() {
    return this.info != null ? this.info.getCreatedBy() : null;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/ExecutionBasedGenerationDTO.java | src/main/java/org/ohdsi/webapi/common/generation/ExecutionBasedGenerationDTO.java | package org.ohdsi.webapi.common.generation;
public class ExecutionBasedGenerationDTO extends CommonGenerationDTO {

  // Count of result files produced by the execution-engine run (0 when none).
  private int numResultFiles;

  public int getNumResultFiles() {
    return numResultFiles;
  }

  public void setNumResultFiles(int numResultFiles) {
    this.numResultFiles = numResultFiles;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/TransactionalTasklet.java | src/main/java/org/ohdsi/webapi/common/generation/TransactionalTasklet.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.exception.AtlasException;
import org.slf4j.Logger;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public abstract class TransactionalTasklet<T> implements Tasklet {

  // Single-thread executor available to subclasses for background work; it is shut
  // down in execute()'s finally block, so a tasklet instance is effectively single-use.
  protected final ExecutorService taskExecutor;
  protected final Logger log;
  protected final TransactionTemplate transactionTemplate;

  public TransactionalTasklet(Logger log, TransactionTemplate transactionTemplate) {
    this.taskExecutor = Executors.newSingleThreadExecutor();
    this.log = log;
    this.transactionTemplate = transactionTemplate;
  }

  // Hook invoked before the transactional work; no-op by default.
  protected void doBefore(ChunkContext chunkContext) {
  }

  // Hook invoked after the work (always, via finally); may override the exit status.
  protected void doAfter(StepContribution stepContribution, ChunkContext chunkContext) {
  }

  /**
   * Runs doTask inside a transaction. On success the exit status is COMPLETED
   * (doAfter may still override it, e.g. to CANCELED); on failure it is FAILED and
   * the error is rethrown as AtlasException so the step fails.
   */
  @Override
  public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
    try {
      doBefore(chunkContext);
      this.transactionTemplate.execute(status -> doTask(chunkContext));
      contribution.setExitStatus(ExitStatus.COMPLETED);
    } catch (final Throwable ex) {
      log.error(ex.getMessage(), ex);
      contribution.setExitStatus(new ExitStatus(Constants.FAILED, ex.getMessage()));
      throw new AtlasException(ex);
    } finally {
      taskExecutor.shutdown();
      doAfter(contribution, chunkContext);
    }
    return RepeatStatus.FINISHED;
  }

  // The actual work, executed within the transaction above.
  protected abstract T doTask(ChunkContext chunkContext);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/CancelableTasklet.java | src/main/java/org/ohdsi/webapi/common/generation/CancelableTasklet.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.util.CancelableJdbcTemplate;
import org.ohdsi.webapi.util.StatementCancel;
import org.slf4j.Logger;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.StoppableTasklet;
import org.springframework.jdbc.core.PreparedStatementCreator;
import org.springframework.transaction.support.TransactionTemplate;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.concurrent.FutureTask;
/**
 * Stoppable tasklet that runs batch SQL updates through a cancelable JdbcTemplate:
 * a stop request cancels the in-flight statement via {@link StatementCancel}.
 */
public abstract class CancelableTasklet extends StoppableTransactionalTasklet<int[]> implements StoppableTasklet {

  // Handle used to cancel the currently running statement on stop().
  protected final StatementCancel stmtCancel;
  protected final CancelableJdbcTemplate jdbcTemplate;

  public CancelableTasklet(Logger log,
                           CancelableJdbcTemplate jdbcTemplate,
                           TransactionTemplate transactionTemplate) {
    super(log, transactionTemplate);
    this.jdbcTemplate = jdbcTemplate;
    this.stmtCancel = new StatementCancel();
  }

  /**
   * Executes either the subclass's raw queries or its statement creators (first
   * non-null wins) as a batch in a background task, waiting until completion or stop.
   */
  protected int[] doTask(ChunkContext chunkContext) {
    Callable<int[]> execution;
    String[] queries = prepareQueries(chunkContext, jdbcTemplate);
    if (Objects.nonNull(queries)) {
      execution = () -> jdbcTemplate.batchUpdate(stmtCancel, queries);
    } else {
      List<PreparedStatementCreator> creators = prepareStatementCreators(chunkContext, jdbcTemplate);
      if (Objects.nonNull(creators)) {
        execution = () -> jdbcTemplate.batchUpdate(stmtCancel, creators);
      } else {
        // Nothing to execute; report an empty update count.
        execution = () -> new int[0];
      }
    }
    FutureTask<int[]> batchUpdateTask = new FutureTask<>(execution);
    taskExecutor.execute(batchUpdateTask);
    return waitForFuture(batchUpdateTask);
  }

  // Subclasses override exactly one of the two prepare* methods; both default to null.
  protected String[] prepareQueries(ChunkContext chunkContext, CancelableJdbcTemplate jdbcTemplate) {
    return null;
  }

  protected List<PreparedStatementCreator> prepareStatementCreators(ChunkContext chunkContext, CancelableJdbcTemplate jdbcTemplate) {
    return null;
  }

  /** Cancels the running statement (best effort) and then flags the tasklet as stopped. */
  @Override
  public void stop() {
    try {
      this.stmtCancel.cancel();
    } catch (SQLException e) {
      // Best-effort cancellation: the stop still proceeds, but don't hide the failure.
      log.warn("Failed to cancel running statement on stop request", e);
    }
    super.stop();
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/GenerationUtils.java | src/main/java/org/ohdsi/webapi/common/generation/GenerationUtils.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.cohortcharacterization.CreateCohortTableTasklet;
import org.ohdsi.webapi.cohortcharacterization.DropCohortTableListener;
import org.ohdsi.webapi.cohortcharacterization.GenerateLocalCohortTasklet;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.executionengine.entity.AnalysisFile;
import org.ohdsi.webapi.executionengine.job.CreateAnalysisTasklet;
import org.ohdsi.webapi.executionengine.job.ExecutionEngineCallbackTasklet;
import org.ohdsi.webapi.executionengine.job.RunExecutionEngineTasklet;
import org.ohdsi.webapi.executionengine.repository.ExecutionEngineGenerationRepository;
import org.ohdsi.webapi.executionengine.service.ScriptExecutionService;
import org.ohdsi.webapi.generationcache.GenerationCacheHelper;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.ohdsi.webapi.service.CohortGenerationService;
import org.ohdsi.webapi.service.GenerationTaskExceptionHandler;
import org.ohdsi.webapi.service.JobService;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.sqlrender.SourceAwareSqlRender;
import org.ohdsi.webapi.util.SessionUtils;
import org.ohdsi.webapi.util.SourceUtils;
import org.ohdsi.webapi.util.TempTableCleanupManager;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import org.springframework.transaction.support.TransactionTemplate;
import javax.persistence.EntityManager;
import java.util.Collection;
import java.util.List;
import java.util.function.Function;
import static org.ohdsi.webapi.Constants.Params.SESSION_ID;
import static org.ohdsi.webapi.Constants.Params.TARGET_TABLE;
@Component
public class GenerationUtils extends AbstractDaoService {
private StepBuilderFactory stepBuilderFactory;
private TransactionTemplate transactionTemplate;
private CohortGenerationService cohortGenerationService;
private SourceService sourceService;
private JobBuilderFactory jobBuilders;
private JobService jobService;
private final SourceAwareSqlRender sourceAwareSqlRender;
private final ScriptExecutionService executionService;
private final ExecutionEngineGenerationRepository executionEngineGenerationRepository;
private final EntityManager entityManager;
private final GenerationCacheHelper generationCacheHelper;
@Value("${cache.generation.useAsync:false}")
private boolean useAsyncCohortGeneration;
public GenerationUtils(StepBuilderFactory stepBuilderFactory,
TransactionTemplate transactionTemplate,
CohortGenerationService cohortGenerationService,
SourceService sourceService,
JobBuilderFactory jobBuilders,
SourceAwareSqlRender sourceAwareSqlRender,
JobService jobService,
ScriptExecutionService executionService,
ExecutionEngineGenerationRepository executionEngineGenerationRepository,
EntityManager entityManager,
GenerationCacheHelper generationCacheHelper) {
this.stepBuilderFactory = stepBuilderFactory;
this.transactionTemplate = transactionTemplate;
this.cohortGenerationService = cohortGenerationService;
this.sourceService = sourceService;
this.jobBuilders = jobBuilders;
this.sourceAwareSqlRender = sourceAwareSqlRender;
this.jobService = jobService;
this.executionService = executionService;
this.executionEngineGenerationRepository = executionEngineGenerationRepository;
this.entityManager = entityManager;
this.generationCacheHelper = generationCacheHelper;
}
public static String getTempCohortTableName(String sessionId) {
return Constants.TEMP_COHORT_TABLE_PREFIX + sessionId;
}
public SimpleJobBuilder buildJobForCohortBasedAnalysisTasklet(
String analysisTypeName,
Source source,
JobParametersBuilder builder,
JdbcTemplate jdbcTemplate,
Function<ChunkContext, Collection<CohortDefinition>> cohortGetter,
CancelableTasklet analysisTasklet
) {
final String sessionId = SessionUtils.sessionId();
addSessionParams(builder, sessionId);
TempTableCleanupManager cleanupManager = new TempTableCleanupManager(
getSourceJdbcTemplate(source),
transactionTemplate,
source.getSourceDialect(),
sessionId,
SourceUtils.getTempQualifier(source)
);
GenerationTaskExceptionHandler exceptionHandler = new GenerationTaskExceptionHandler(cleanupManager);
CreateCohortTableTasklet createCohortTableTasklet = new CreateCohortTableTasklet(jdbcTemplate, transactionTemplate, sourceService, sourceAwareSqlRender);
Step createCohortTableStep = stepBuilderFactory.get(analysisTypeName + ".createCohortTable")
.tasklet(createCohortTableTasklet)
.build();
GenerateLocalCohortTasklet generateLocalCohortTasklet = new GenerateLocalCohortTasklet(
transactionTemplate,
getSourceJdbcTemplate(source),
cohortGenerationService,
sourceService,
cohortGetter,
generationCacheHelper,
useAsyncCohortGeneration
);
Step generateLocalCohortStep = stepBuilderFactory.get(analysisTypeName + ".generateCohort")
.tasklet(generateLocalCohortTasklet)
.build();
Step generateAnalysisStep = stepBuilderFactory.get(analysisTypeName + ".generate")
.tasklet(analysisTasklet)
.exceptionHandler(exceptionHandler)
.build();
DropCohortTableListener dropCohortTableListener = new DropCohortTableListener(jdbcTemplate, transactionTemplate, sourceService, sourceAwareSqlRender);
SimpleJobBuilder generateJobBuilder = jobBuilders.get(analysisTypeName)
.start(createCohortTableStep)
.next(generateLocalCohortStep)
.next(generateAnalysisStep)
.listener(dropCohortTableListener)
.listener(new AutoremoveJobListener(jobService));
return generateJobBuilder;
}
protected void addSessionParams(JobParametersBuilder builder, String sessionId) {
builder.addString(SESSION_ID, sessionId);
builder.addString(TARGET_TABLE, GenerationUtils.getTempCohortTableName(sessionId));
}
public SimpleJobBuilder buildJobForExecutionEngineBasedAnalysisTasklet(String analysisTypeName,
Source source,
JobParametersBuilder builder,
List<AnalysisFile> analysisFiles) {
final String sessionId = SessionUtils.sessionId();
addSessionParams(builder, sessionId);
CreateAnalysisTasklet createAnalysisTasklet = new CreateAnalysisTasklet(executionService, source.getSourceKey(), analysisFiles);
RunExecutionEngineTasklet runExecutionEngineTasklet = new RunExecutionEngineTasklet(executionService, source, analysisFiles);
ExecutionEngineCallbackTasklet callbackTasklet = new ExecutionEngineCallbackTasklet(executionEngineGenerationRepository, entityManager);
Step createAnalysisExecutionStep = stepBuilderFactory.get(analysisTypeName + ".createAnalysisExecution")
.tasklet(createAnalysisTasklet)
.build();
Step runExecutionStep = stepBuilderFactory.get(analysisTypeName + ".startExecutionEngine")
.tasklet(runExecutionEngineTasklet)
.build();
Step waitCallbackStep = stepBuilderFactory.get(analysisTypeName + ".waitForCallback")
.tasklet(callbackTasklet)
.build();
DropCohortTableListener dropCohortTableListener = new DropCohortTableListener(getSourceJdbcTemplate(source),
transactionTemplate, sourceService, sourceAwareSqlRender);
return jobBuilders.get(analysisTypeName)
.start(createAnalysisExecutionStep)
.next(runExecutionStep)
.next(waitCallbackStep)
.listener(dropCohortTableListener)
.listener(new AutoremoveJobListener(jobService));
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/GenerateSqlResult.java | src/main/java/org/ohdsi/webapi/common/generation/GenerateSqlResult.java | package org.ohdsi.webapi.common.generation;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Response payload for "generate SQL" endpoints: carries the rendered
 * template SQL back to the client.
 */
public class GenerateSqlResult {
// Rendered SQL text; serialized as "templateSql" in the JSON response.
@JsonProperty("templateSql")
public String templateSql;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/CommonGenerationDTO.java | src/main/java/org/ohdsi/webapi/common/generation/CommonGenerationDTO.java | package org.ohdsi.webapi.common.generation;
import java.util.Date;
/**
 * Transport object describing a single analysis generation run: its id,
 * status, source key, design hash, start/end timestamps and exit message.
 * Plain mutable bean; all setter parameters are {@code final} (the original
 * applied {@code final} to only the first three setters — now consistent).
 */
public class CommonGenerationDTO {

    private Long id;
    private String status;
    private String sourceKey;
    // Hash of the analysis design at generation time (used for cache matching).
    private Integer hashCode;
    private Date startTime;
    private Date endTime;
    private String exitMessage;

    public Long getId() {
        return id;
    }

    public void setId(final Long id) {
        this.id = id;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(final String status) {
        this.status = status;
    }

    public String getSourceKey() {
        return sourceKey;
    }

    public void setSourceKey(final String sourceKey) {
        this.sourceKey = sourceKey;
    }

    public Integer getHashCode() {
        return hashCode;
    }

    public void setHashCode(final Integer hashCode) {
        this.hashCode = hashCode;
    }

    public Date getStartTime() {
        return startTime;
    }

    public void setStartTime(final Date startTime) {
        this.startTime = startTime;
    }

    public Date getEndTime() {
        return endTime;
    }

    public void setEndTime(final Date endTime) {
        this.endTime = endTime;
    }

    public String getExitMessage() {
        return exitMessage;
    }

    public void setExitMessage(final String exitMessage) {
        this.exitMessage = exitMessage;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/AutoremoveJobListener.java | src/main/java/org/ohdsi/webapi/common/generation/AutoremoveJobListener.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.service.JobService;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.listener.JobExecutionListenerSupport;
/**
 * Job listener that removes the finished job from the {@link JobService}
 * registry as soon as it completes, regardless of exit status.
 */
public class AutoremoveJobListener extends JobExecutionListenerSupport {

private final JobService jobService;

public AutoremoveJobListener(JobService jobService) {
this.jobService = jobService;
}

@Override
public void afterJob(JobExecution jobExecution) {
// Deregister by job id once execution has finished (success or failure).
jobService.removeJob(jobExecution.getJobId());
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/generation/AnalysisGenerationInfoEntity.java | src/main/java/org/ohdsi/webapi/common/generation/AnalysisGenerationInfoEntity.java | package org.ohdsi.webapi.common.generation;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * JPA entity storing the design snapshot captured for a generation run,
 * keyed by the Spring Batch job execution id. The payload itself lives in
 * the embedded {@code AnalysisGenerationInfo}.
 */
@Entity
@Table(name = "analysis_generation_info")
public class AnalysisGenerationInfoEntity {

// Primary key = Spring Batch job execution id of the generation run.
@Id
@Column(name = "job_execution_id")
private Long id;

@Embedded
private AnalysisGenerationInfo info = new AnalysisGenerationInfo();

public void setId(Long id) {
this.id = id;
}

public String getDesign() {
return info.getDesign();
}

/**
 * Stores the serialized design and keeps the stored hash in sync with it.
 * NOTE(review): throws NPE if serializedDesign is null — confirm callers
 * never pass null.
 */
public void setDesign(String serializedDesign) {
this.info.design = serializedDesign;
this.info.hashCode = serializedDesign.hashCode();
}

public void setCreatedBy(UserEntity createdBy) {
this.info.createdBy = createdBy;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/analyses/CommonAnalysisDTO.java | src/main/java/org/ohdsi/webapi/common/analyses/CommonAnalysisDTO.java | package org.ohdsi.webapi.common.analyses;
import org.ohdsi.webapi.service.dto.CommonEntityDTO;
import org.ohdsi.webapi.user.dto.UserDTO;
import java.util.Date;
/**
 * Lightweight DTO exposing the identifying fields of an analysis:
 * id, name and description. Audit fields come from {@code CommonEntityDTO}.
 */
public class CommonAnalysisDTO extends CommonEntityDTO {

    private Integer id;
    private String name;
    private String description;

    public Integer getId() {
        return id;
    }

    public void setId(final Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(final String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(final String description) {
        this.description = description;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/orm/EnumListType.java | src/main/java/org/ohdsi/webapi/common/orm/EnumListType.java | package org.ohdsi.webapi.common.orm;
import org.hibernate.type.AbstractSingleColumnStandardBasicType;
import org.hibernate.type.descriptor.java.JavaTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
import org.hibernate.type.descriptor.sql.VarcharTypeDescriptor;
import org.hibernate.usertype.DynamicParameterizedType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Properties;
/**
 * Hibernate basic type that persists a {@code List} of enum constants in a
 * single VARCHAR column. The concrete enum class is configured per mapping
 * via the {@code enumClass} type parameter (see {@link #setParameterValues}).
 */
public class EnumListType extends AbstractSingleColumnStandardBasicType<List> implements DynamicParameterizedType {

private static final Logger LOGGER = LoggerFactory.getLogger(EnumListType.class);
// Name under which this type is referenced from entity mappings, e.g. @Type(type = "enum-list").
public static final String TYPE_NAME = "enum-list";

public EnumListType() {
// The Java type descriptor is not known yet at construction time;
// it is installed later by setParameterValues() once enumClass is known.
super(VarcharTypeDescriptor.INSTANCE, null);
}

@Override
public String getName() {
return TYPE_NAME;
}

@Override
public String[] getRegistrationKeys() {
return new String[]{ getName(), "List", List.class.getName() };
}

/**
 * Resolves the configured enum class and installs the matching
 * {@code EnumListTypeDescriptor}.
 * NOTE(review): if the class cannot be loaded the error is only logged and
 * the Java type descriptor stays null, which will fail later with an NPE —
 * confirm whether failing fast here would be preferable.
 */
@Override
public void setParameterValues(Properties properties) {
String enumClassName = properties.getProperty("enumClass");
try {
setJavaTypeDescriptor(new EnumListTypeDescriptor((Class<Enum>) Class.forName(enumClassName)));
} catch (ClassNotFoundException e) {
LOGGER.error("Failed to initialize enum list type", e);
}
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/orm/EnumListTypeDescriptor.java | src/main/java/org/ohdsi/webapi/common/orm/EnumListTypeDescriptor.java | package org.ohdsi.webapi.common.orm;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.AbstractTypeDescriptor;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Java type descriptor converting between a {@code List} of enum constants
 * and its comma-separated string-of-names representation.
 * Assumes enum constant names never contain the delimiter ','.
 */
public class EnumListTypeDescriptor extends AbstractTypeDescriptor<List> {
public static final String DELIMITER = ",";
private Class<Enum> enumClass;
// Lookup table from constant name to constant, built once from enumClass.
private Map<String, Enum> enumConstantMap = new HashMap<>();

protected EnumListTypeDescriptor(Class<Enum> enumClass) {
super(List.class);
this.enumClass = enumClass;
Enum[] enumConst = enumClass.getEnumConstants();
for(Enum value : enumConst) {
enumConstantMap.put(value.name(), value);
}
}

/**
 * Parses a comma-separated string of enum names into a list of constants.
 * Blank/null input yields an empty list; names not found in the enum are
 * silently dropped.
 */
@Override
public List fromString(String s) {
List result = new ArrayList();
if (StringUtils.isNotBlank(s)) {
result = Arrays.stream(StringUtils.split(s, DELIMITER))
.map(v -> enumConstantMap.get(v))
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
return result;
}

/**
 * Serializes a list of enum constants to a comma-separated string of their
 * names. NOTE(review): NPEs on a null list, but unwrap() null-checks before
 * delegating here — confirm no other caller passes null.
 */
@Override
public String toString(List value) {
// Cast is required because the raw List stream erases the element type.
return (String) value.stream()
.map(Enum.class::cast)
.map(v -> ((Enum) v).name())
.collect(Collectors.joining(DELIMITER));
}

/**
 * Converts the list to the JDBC-side representation: passes lists through
 * as-is and renders strings via toString(); any other target type is
 * rejected.
 */
@Override
public <X> X unwrap(List value, Class<X> aClass, WrapperOptions wrapperOptions) {
if (Objects.isNull(value)) {
return null;
}
if (List.class.isAssignableFrom(aClass)) {
return (X)value;
}
if (String.class.isAssignableFrom(aClass)) {
return (X) toString(value);
}
throw unknownUnwrap(aClass);
}

/**
 * Converts a JDBC-side value back into a list: lists pass through, strings
 * are parsed via fromString(); any other source type is rejected.
 */
@Override
public <X> List wrap(X value, WrapperOptions wrapperOptions) {
if (Objects.isNull(value)) {
return null;
}
if (value instanceof List) {
return (List)value;
}
if (value instanceof String) {
return fromString((String) value);
}
throw unknownWrap(value.getClass());
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/sensitiveinfo/AbstractSensitiveInfoService.java | src/main/java/org/ohdsi/webapi/common/sensitiveinfo/AbstractSensitiveInfoService.java | package org.ohdsi.webapi.common.sensitiveinfo;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.ohdsi.circe.helper.ResourceHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.BeanInitializationException;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.ParseException;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import javax.annotation.PostConstruct;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
/**
 * Base service that redacts sensitive fragments from generated text for
 * non-admin users. Redaction rules (regex + SpEL replacement expression)
 * are loaded once from a bundled CSV resource.
 */
public class AbstractSensitiveInfoService extends AbstractAdminService {

    // Fixed: the logger was previously created with SensitiveFilter.class,
    // mislabeling the log category of this service.
    protected static final Logger LOGGER = LoggerFactory.getLogger(AbstractSensitiveInfoService.class);

    private Collection<SensitiveFilter> filters;

    /**
     * Loads the regex/expression filter pairs from
     * {@code /resources/generation/sensitive_filters.csv}.
     *
     * @throws BeanInitializationException if the CSV cannot be read
     */
    @PostConstruct
    public void init() {
        String filterSource = ResourceHelper.GetResourceAsString("/resources/generation/sensitive_filters.csv");
        filters = new ArrayList<>();
        try(Reader in = new StringReader(filterSource)) {
            try(CSVParser parser = new CSVParser(in, CSVFormat.RFC4180)) {
                for (final CSVRecord record : parser) {
                    // Column 0: regex to match; column 1: SpEL expression producing the replacement.
                    filters.add(new SensitiveFilter(record.get(0), record.get(1)));
                }
            }
        } catch (IOException e) {
            throw new BeanInitializationException("Failed to read sensitive_filters.csv", e);
        }
    }

    /**
     * Applies every filter to {@code text} unless the caller is an admin.
     * Each filter's SpEL expression is evaluated against {@code variables}
     * to produce the replacement string.
     *
     * @param text      text to redact; returned unchanged if null
     * @param variables SpEL context variables (may be null)
     * @param isAdmin   admins see the text unredacted
     * @return the (possibly redacted) text
     */
    protected String filterSensitiveInfo(String text, Map<String, Object> variables, boolean isAdmin) {
        String result = text;
        if (Objects.nonNull(result) && !isAdmin) {
            ExpressionParser parser = new SpelExpressionParser();
            StandardEvaluationContext context = new StandardEvaluationContext();
            if (Objects.nonNull(variables)) {
                context.setVariables(variables);
            }
            //Apply filter
            for (SensitiveFilter filter : filters) {
                try {
                    String value = parser.parseExpression(filter.expression).getValue(context, String.class);
                    // Guard: String.replaceAll throws NPE on a null replacement,
                    // and a SpEL expression may legitimately evaluate to null.
                    if (Objects.nonNull(value)) {
                        result = result.replaceAll(filter.regex, value);
                    }
                } catch (ParseException e) {
                    LOGGER.warn("Cannot parse expression: {}", filter.expression, e);
                }
            }
        }
        return result;
    }

    // NOTE(review): appears to exist only to widen visibility of the parent's
    // isAdmin() — confirm against AbstractAdminService.
    public boolean isAdmin() {
        return super.isAdmin();
    }

    /** Immutable pair of a match regex and a SpEL replacement expression. */
    static class SensitiveFilter {

        private final String regex;
        private final String expression;

        public SensitiveFilter(String regex, String expression) {
            this.regex = regex;
            this.expression = expression;
        }

        public String getRegex() {
            return regex;
        }

        public String getExpression() {
            return expression;
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/common/sensitiveinfo/VariablesResolver.java | src/main/java/org/ohdsi/webapi/common/sensitiveinfo/VariablesResolver.java | package org.ohdsi.webapi.common.sensitiveinfo;
import java.util.Map;
/**
 * Strategy for deriving the SpEL context variables used during sensitive-info
 * filtering from a domain object.
 *
 * @param <T> type of the object the variables are derived from
 */
@FunctionalInterface
public interface VariablesResolver<T> {
// Returns the variable name -> value map to expose to filter expressions.
Map<String, Object> resolveVariables(T data);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.