repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/PreparedStatementRenderer.java | src/main/java/org/ohdsi/webapi/util/PreparedStatementRenderer.java | package org.ohdsi.webapi.util;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.sql.BigQuerySparkTranslate;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.source.Source;
import org.springframework.jdbc.core.PreparedStatementSetter;
/**
 * Converts SQL into {@link java.sql.PreparedStatement} form and captures its ordered arguments in
 * a {@link PreparedStatementSetter}. The conversion to prepared-statement form is what removes the
 * SQL-injection risk; the setter object is required to execute prepared statements through
 * Spring's JdbcTemplate framework.
 *
 * @author DNS SHELLB (Brett Shelley)
 */
public class PreparedStatementRenderer implements ParameterizedSqlProvider {

  private Source source;
  // Working SQL text; progressively rewritten by the constructor pipeline.
  private String sql;
  // NOTE(review): passed to validateArguments(...) but never read there.
  private String sourceDialect = "sql server";
  private String tempSchema = null;
  // Parameter values ordered to match the '?' placeholders in the generated SQL.
  private List<Object> orderedParamsList;
  private String targetDialect = "sql server";
  private String sessionId;
  private PreparedStatementSetter preparedStatementSetter;
  private Map<String, Object> paramValueMap = new HashMap<String, Object>();

  public List<Object> getOrderedParamsList() {
    return orderedParamsList;
  }

  /**
   * @return the ordered parameter values as an array; never {@code null}
   */
  public Object[] getOrderedParams() {
    if (orderedParamsList == null || orderedParamsList.isEmpty()) {
      return new Object[0];
    }
    return getOrderedParamsList().toArray(new Object[getOrderedParamsList().size()]);
  }

  /**
   * Generates prepared-statement SQL and a {@link PreparedStatementSetter} based on the supplied
   * arguments.
   *
   * @param source the Source object needed to translate the resulting SQL into the targeted dialect
   * @param sqlResource the classpath path of a ".sql" resource, or — when the value does not end
   *        with ".sql" — the literal SQL text itself
   * @param searchRegexes expressions replaced by a simple search-and-replace; normally table
   *        qualifier markers such as 'OMOP_schema', but dynamically generated SQL can be injected
   *        the same way before processing
   * @param replacementStrings the corresponding replacement values, e.g. a schema like 'omop_v5'
   * @param sqlVariableNames the names of the variables in the base SQL
   * @param sqlVariableValues the values of the variable names in the base SQL
   * @param sessionId the session id for the SqlTranslate methods
   * @throws IllegalArgumentException if the arguments are not valid
   */
  public PreparedStatementRenderer(Source source, String sqlResource, String[] searchRegexes, String[] replacementStrings, String[] sqlVariableNames, Object[] sqlVariableValues, String sessionId) {
    super();
    this.source = source;
    // Loads the SQL (resource or literal) into 'sql' and validates the parallel arrays.
    validateArguments(source, sqlResource, searchRegexes, replacementStrings, sourceDialect, sqlVariableNames, sqlVariableValues);
    /// this part does the heavy lifting, the calling classes can get needed items through getters
    sql = PreparedSqlRender.removeSqlComments(sql);
    updateSqlWithVariableSearchAndReplace(searchRegexes, replacementStrings);
    paramValueMap = buildParamValueMap(sqlVariableNames, sqlVariableValues);
    this.orderedParamsList = PreparedSqlRender.getOrderedListOfParameterValues(paramValueMap, sql);
    // NULL values are inlined as SQL literals below, so they must not appear in the bind list.
    this.orderedParamsList = this.orderedParamsList.stream().filter(Objects::nonNull).collect(Collectors.toList());
    buildPreparedStatementSetter();
    sql = PreparedSqlRender.fixPreparedStatementSql(
        sql,
        paramValueMap,
        // NOTE:
        // The current BigQuery driver throws an NPE when NULL is bound as a prepared-statement
        // variable, so NULLs are pasted directly into the SQL instead. Since queries processed
        // through PreparedStatementRenderer are mainly one-off, and SQL is sometimes translated
        // in advance (so no source is passed into the constructor), the approach is applied to
        // all dialects.
        object -> object == null ? "NULL" : "?"
    );
    if (source != null) {
      this.targetDialect = source.getSourceDialect();
      try {
        this.tempSchema = SourceUtils.getTempQualifier(source);
      }
      catch (Exception e) {
        // Best effort: a source without temp/results qualifiers simply gets no temp schema.
        this.tempSchema = null;
      }
    }
    this.sessionId = sessionId;
  }

  public PreparedStatementRenderer(Source source, String sqlResource, String[] searchRegexes, String[] replacementStrings, String sessionId) {
    this(source, sqlResource, searchRegexes, replacementStrings, null, null, sessionId);
  }

  public PreparedStatementRenderer(Source source, String resourcePath, String searchRegex, String replacementString) {
    this(source, resourcePath, new String[]{searchRegex}, new String[]{replacementString}, new String[0], new Object[0], null);
  }

  public PreparedStatementRenderer(Source source, String resourcePath, String[] searchRegexes, String[] replacementStrings, String[] sqlVariableNames, Object[] sqlVariableValues) {
    this(source, resourcePath, searchRegexes, replacementStrings, sqlVariableNames, sqlVariableValues, null);
  }

  public PreparedStatementRenderer(Source source, String resourcePath, String[] searchRegexes, String[] replacementStrings, String sqlVariableName, Object sqlVariableValue) {
    this(source, resourcePath, searchRegexes, replacementStrings, new String[]{sqlVariableName}, new Object[]{sqlVariableValue}, null);
  }

  public PreparedStatementRenderer(Source source, String resourcePath, String searchRegex, String replacementString, String[] sqlVariableNames, Object[] sqlVariableValues) {
    this(source, resourcePath, new String[]{searchRegex}, new String[]{replacementString}, sqlVariableNames, sqlVariableValues, null);
  }

  public PreparedStatementRenderer(Source source, String resourcePath, String searchRegex, String replacementString, String sqlVariableName, Object sqlVariableValue) {
    this(source, resourcePath, new String[]{searchRegex}, new String[]{replacementString}, new String[]{sqlVariableName}, new Object[]{sqlVariableValue}, null);
  }

  public PreparedStatementRenderer(Source source, String resourcePath, String searchRegex, String replacementString, String sqlVariableName, Object sqlVariableValue, String sessionId) {
    this(source, resourcePath, new String[]{searchRegex}, new String[]{replacementString}, new String[]{sqlVariableName}, new Object[]{sqlVariableValue}, sessionId);
  }

  public PreparedStatementRenderer(Source source, String resourcePath, String searchRegex, String replacementString, String[] sqlVariableNames, Object[] sqlVariableValues, String sessionId) {
    this(source, resourcePath, new String[]{searchRegex}, new String[]{replacementString}, sqlVariableNames, sqlVariableValues, sessionId);
  }

  /**
   * Applies each search/replace pair to the working SQL. A simple search-and-replace works for
   * table qualifier names and values; search strings are matched literally (with an '@' prefix
   * added when missing), except for strings starting with a block comment marker.
   */
  private final void updateSqlWithVariableSearchAndReplace(String[] searchRegexes, String[] replacementStrings) {
    if (searchRegexes == null || replacementStrings == null) {
      return;
    }
    for (int i = 0; i < searchRegexes.length; i++) {
      String searchRegex = searchRegexes[i];
      String replacement = replacementStrings[i];
      if (searchRegex == null || searchRegex.trim().isEmpty()) {
        continue;
      }
      // FIX: quote the replacement so '$' or '\' inside a replacement value (e.g. a schema
      // name) is taken literally instead of being interpreted as a regex group reference
      // by String.replaceAll.
      String quotedReplacement = java.util.regex.Matcher.quoteReplacement(replacement);
      String regexToReplace;
      if (searchRegex.startsWith("/*")) {
        regexToReplace = Pattern.quote(searchRegex);
      } else {
        regexToReplace = Pattern.quote(searchRegex.startsWith("@") ? searchRegex : ("@" + searchRegex));
      }
      if (sql.contains("--")) {
        // NOTE(review): when the SQL contains line comments, the search string is applied as a
        // raw (unquoted) regex — inconsistent with the quoted branch below; preserved as-is.
        String stringToReplace = searchRegex;
        if (!stringToReplace.startsWith("@")) stringToReplace = "@" + stringToReplace;
        sql = sql.replaceAll(stringToReplace, quotedReplacement);
      } else {
        sql = sql.replaceAll(regexToReplace, quotedReplacement);
      }
    }
  }

  /**
   * Loads the SQL and validates that the qualifier and variable arrays are well formed.
   *
   * @throws IllegalArgumentException when lengths mismatch or required values are missing
   */
  final void validateArguments(Source source, String sqlResource, String[] tableQualifierNames, String[] tableQualifierValues, String sourceDialect, String[] sqlVariableNames, Object[] sqlVariableValues) {
    validateAndLoadSql(sqlResource);
    if (tableQualifierNames != null && tableQualifierValues != null) {
      if (tableQualifierNames.length != tableQualifierValues.length) {
        throw new IllegalArgumentException("'tableQualifierNames' array argument must have the same length of 'tableQualifierValues' array argument");
      }
      for (int i = 0; i < tableQualifierNames.length; i++) {
        String tableQualifierName = tableQualifierNames[i];
        String tableQualifierValue = tableQualifierValues[i];
        if (tableQualifierName != null && !tableQualifierName.trim().isEmpty()) {
          if (tableQualifierValue == null) {
            throw new IllegalArgumentException("'tableQualifierValue' argument cannot be null or empty string when 'tableQualifierName' argument has been specified");
          }
        }
      }
    }
    if (sqlVariableNames == null && sqlVariableValues != null) {
      throw new IllegalArgumentException("'sqlVariableNames' argument is null");
    }
    if (sqlVariableValues == null && sqlVariableNames != null) {
      throw new IllegalArgumentException("'sqlVariableValues' argument is null");
    }
    // The constructor recomputes this after search/replace; kept here so direct callers of
    // validateArguments still get shape validation of the variable arrays.
    paramValueMap = buildParamValueMap(sqlVariableNames, sqlVariableValues);
  }

  /**
   * Loads SQL from the classpath when {@code sqlResource} ends with ".sql" (case-insensitive);
   * otherwise treats the argument as literal SQL.
   *
   * @throws IllegalArgumentException when {@code sqlResource} is null or empty
   * @throws RuntimeException when the classpath resource cannot be loaded
   */
  final void validateAndLoadSql(String sqlResource) {
    if (sqlResource == null) {
      String message = "'sqlResource' argument cannot be null; 'sqlResource' argument is needed to load sql from classpath";
      throw new IllegalArgumentException(message);
    }
    if (sqlResource.trim().isEmpty()) {
      // FIX: this message previously said "cannot be null" — a copy/paste of the null check above.
      String message = "'sqlResource' argument cannot be empty; 'sqlResource' argument is needed to load sql from classpath";
      throw new IllegalArgumentException(message);
    }
    /// determine if sql is a resource or sql directly
    if (sqlResource.toLowerCase(Locale.ENGLISH).endsWith(".sql")) {
      sql = ResourceHelper.GetResourceAsString(sqlResource);
      if (sql == null || sql.trim().isEmpty())
        throw new RuntimeException("sql string could not be loaded from 'sqlResource' argument");
    } else {
      /// we assume the sql is not a resource reference, but is actually the Sql string itself
      sql = sqlResource;
    }
  }

  // Wraps the ordered parameter list in a Spring PreparedStatementSetter.
  final void buildPreparedStatementSetter() {
    preparedStatementSetter = new OrderedPreparedStatementSetter(orderedParamsList);
  }

  /**
   * Builds a map of parameter name to value (primitive arrays boxed to wrapper arrays).
   *
   * @param parameters parameter names; may be null (treated as empty together with values)
   * @param values parameter values; may be null (treated as empty together with parameters)
   * @return a map containing an equal number of parameters and values
   * @throws IllegalArgumentException on length mismatch or null/empty parameter names
   */
  final Map<String, Object> buildParamValueMap(String[] parameters, Object[] values) {
    Map<String, Object> result = new HashMap<>();
    if (parameters == null || values == null) return result;
    if (parameters.length != values.length) {
      String message = "Arrays sizes do not match: parameters length of " + parameters.length + " is not equal to values length of " + values.length;
      throw new IllegalArgumentException(message);
    }
    for (int i = 0; i < parameters.length; i++) {
      String parameter = parameters[i];
      Object value = values[i];
      if (parameter == null && value != null) {
        throw new IllegalArgumentException("null value found in 'parameters' argument");
      } else if (parameter != null && parameter.trim().isEmpty() && value != null) ///fortify
      {
        throw new IllegalArgumentException("empty string parameter value found in 'parameters' argument");
      }
      // A non-null parameter with a null value is deliberately allowed: nulls are later
      // inlined as SQL NULL literals rather than bound (see the constructor).
      result.put(parameter, convertPrimitiveArraysToWrapperArrays(value));
    }
    return result;
  }

  /**
   * @return the SQL translated to the target dialect for this renderer's source
   */
  public String getSql() {
    return SqlTranslate.translateSingleStatementSql(sql, targetDialect, sessionId, tempSchema);
  }

  public PreparedStatementSetter getSetter() {
    return preparedStatementSetter;
  }

  // Boxes a primitive array into its wrapper-type array so it can travel through Object-typed APIs.
  private Object convertPrimitiveArraysToWrapperArrays(Object value) {
    if (value == null) return null;
    if (!value.getClass().isArray()) return value;
    if (value instanceof boolean[]) return ArrayUtils.toObject((boolean[]) value);
    if (value instanceof byte[]) return ArrayUtils.toObject((byte[]) value);
    if (value instanceof char[]) return ArrayUtils.toObject((char[]) value);
    if (value instanceof double[]) return ArrayUtils.toObject((double[]) value);
    if (value instanceof float[]) return ArrayUtils.toObject((float[]) value);
    if (value instanceof int[]) return ArrayUtils.toObject((int[]) value);
    if (value instanceof long[]) return ArrayUtils.toObject((long[]) value);
    if (value instanceof short[]) return ArrayUtils.toObject((short[]) value);
    return value;
  }

  /**
   * Renders the given SQL with both qualifier and variable substitutions applied, for debugging.
   * NOTE(review): all four array arguments are assumed non-null — a null array throws NPE here.
   */
  public String generateDebugSql(String sql, String[] searchRegexes, String[] replacementStrings, String[] sqlVariableNames, Object[] sqlVariableValues) {
    String[] vars = Stream.concat(Stream.of(searchRegexes), Stream.of(sqlVariableNames)).toArray(String[]::new);
    String[] vals = Stream.concat(Stream.of(replacementStrings),
        Stream.of(sqlVariableValues)).map((v) -> {
          Object obj = convertPrimitiveArraysToWrapperArrays(v);
          String result = String.valueOf(obj);
          if (obj instanceof String[]) {
            // String arrays render as a quoted, comma-separated list: 'a','b'
            result = "'" + StringUtils.join((Object[]) v, "','") + "'";
          } else if (obj instanceof Object[]) {
            result = StringUtils.join((Object[]) obj, ",");
          }
          return result;
        })
        .toArray(size -> new String[size]);
    return SqlRender.renderSql(sql, vars, vals);
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/ManagedThreadPoolTaskExecutor.java | src/main/java/org/ohdsi/webapi/util/ManagedThreadPoolTaskExecutor.java | package org.ohdsi.webapi.util;
import org.springframework.jmx.export.annotation.ManagedAttribute;
import org.springframework.jmx.export.annotation.ManagedResource;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
@ManagedResource
/**
 * A {@link ThreadPoolTaskExecutor} whose pool metrics are exposed as JMX managed attributes.
 * Metrics that require the underlying {@link ThreadPoolExecutor} fall back to 0 when it is
 * not available.
 */
@ManagedResource
public class ManagedThreadPoolTaskExecutor extends ThreadPoolTaskExecutor {

  @ManagedAttribute
  @Override
  public int getCorePoolSize() {
    return super.getCorePoolSize();
  }

  @ManagedAttribute
  @Override
  public int getMaxPoolSize() {
    return super.getMaxPoolSize();
  }

  @ManagedAttribute
  @Override
  public int getKeepAliveSeconds() {
    return super.getKeepAliveSeconds();
  }

  @ManagedAttribute
  @Override
  public int getPoolSize() {
    return super.getPoolSize();
  }

  @ManagedAttribute
  @Override
  public int getActiveCount() {
    return super.getActiveCount();
  }

  /** Total number of completed tasks, or 0 when no underlying executor exists. */
  @ManagedAttribute
  public long getCompletedTaskCount() {
    ThreadPoolExecutor delegate = getThreadPoolExecutor();
    return delegate == null ? 0L : delegate.getCompletedTaskCount();
  }

  /** Largest pool size reached so far, or 0 when no underlying executor exists. */
  @ManagedAttribute
  public int getLargestPoolSize() {
    ThreadPoolExecutor delegate = getThreadPoolExecutor();
    return delegate == null ? 0 : delegate.getLargestPoolSize();
  }

  /** Approximate total number of scheduled tasks, or 0 when no underlying executor exists. */
  @ManagedAttribute
  public long getTaskCount() {
    ThreadPoolExecutor delegate = getThreadPoolExecutor();
    return delegate == null ? 0L : delegate.getTaskCount();
  }

  /** Number of tasks currently waiting in the queue, or 0 when unavailable. */
  @ManagedAttribute
  public int getQueuedTaskCount() {
    ThreadPoolExecutor delegate = getThreadPoolExecutor();
    if (delegate == null) {
      return 0;
    }
    BlockingQueue<Runnable> queue = delegate.getQueue();
    return queue == null ? 0 : queue.size();
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/HttpUtils.java | src/main/java/org/ohdsi/webapi/util/HttpUtils.java | package org.ohdsi.webapi.util;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.OutputStream;
/**
 * Small helpers for building JAX-RS HTTP responses.
 */
public class HttpUtils {

  /**
   * Builds a binary (octet-stream) download response for the given stream, with a
   * Content-Disposition header that suggests the supplied file name.
   *
   * @param stream the payload to send
   * @param filename the file name offered to the client
   * @return a ready-to-return JAX-RS response
   */
  public static Response respondBinary(OutputStream stream, String filename) {
    String disposition = String.format("attachment; filename=\"%s\"", filename);
    return Response.ok(stream)
        .type(MediaType.APPLICATION_OCTET_STREAM)
        .header("Content-Disposition", disposition)
        .build();
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/PreparedStatementWithParamsCreator.java | src/main/java/org/ohdsi/webapi/util/PreparedStatementWithParamsCreator.java | package org.ohdsi.webapi.util;
import org.springframework.jdbc.core.PreparedStatementCreator;
/**
 * A {@link PreparedStatementCreator} that also exposes the ordered parameter values used to
 * populate the statement (via {@code ParameterizedSqlProvider}). Marker interface with no
 * additional members.
 */
public interface PreparedStatementWithParamsCreator extends PreparedStatementCreator, ParameterizedSqlProvider {
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/DataSourceDTOParser.java | src/main/java/org/ohdsi/webapi/util/DataSourceDTOParser.java | package org.ohdsi.webapi.util;
import com.odysseusinc.arachne.commons.types.DBMSType;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.AuthMethod;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.DataSourceUnsecuredDTO;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.file.Paths;
import java.util.*;
import com.odysseusinc.arachne.execution_engine_common.util.BigQueryUtils;
import com.odysseusinc.arachne.execution_engine_common.util.ConnectionParams;
import com.odysseusinc.arachne.execution_engine_common.util.ConnectionParamsParser;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.webapi.KerberosUtils;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import static com.odysseusinc.arachne.commons.types.DBMSType.BIGQUERY;
import static com.odysseusinc.arachne.commons.types.DBMSType.IMPALA;
/**
 * Builds Arachne {@code DataSourceUnsecuredDTO} objects from a WebAPI {@link Source},
 * resolving connection parameters, credentials, schemas and (for BigQuery) the key file.
 */
public final class DataSourceDTOParser {

  /**
   * Converts a {@link Source} into a fully populated DTO for the execution engine.
   *
   * @param source the WebAPI source; must not be null
   * @return the populated DTO
   * @throws RuntimeException when the source's dialect maps to no known {@code DBMSType}
   */
  public static DataSourceUnsecuredDTO parseDTO(Source source) {
    ConnectionParams params = parse(source);
    DataSourceUnsecuredDTO dto = new DataSourceUnsecuredDTO();
    dto.setName(source.getSourceName());
    dto.setType(getDbmsType(source));
    dto.setConnectionString(params.getConnectionString());
    dto.setUsername(params.getUser());
    dto.setPassword(params.getPassword());
    if (Objects.equals(IMPALA, dto.getType()) && AuthMethod.KERBEROS == params.getAuthMethod()) {
      KerberosUtils.setKerberosParams(source, params, dto);
    }
    if (Objects.equals(BIGQUERY, dto.getType())) {
      try {
        dto.setKeyfile(getKeyfile(source));
      } catch (IOException ignored) {
        // Best effort: a missing or unreadable key file simply leaves the keyfile unset.
      }
    }
    dto.setCdmSchema(source.getTableQualifierOrNull(SourceDaimon.DaimonType.CDM));
    dto.setVocabularySchema(source.getTableQualifierOrNull(SourceDaimon.DaimonType.Vocabulary));
    dto.setResultSchema(source.getTableQualifierOrNull(SourceDaimon.DaimonType.Results));
    return dto;
  }

  /**
   * Resolves the BigQuery key file: the source's stored bytes when present, otherwise the file
   * referenced by the connection string's key path, or null when neither exists.
   */
  static private byte[] getKeyfile(Source source) throws IOException {
    if (ArrayUtils.isNotEmpty(source.getKeyfile())) {
      return source.getKeyfile();
    }
    String keyPath = BigQueryUtils.getBigQueryKeyPath(source.getSourceConnection());
    if (StringUtils.isNotEmpty(keyPath)) {
      java.nio.file.Path path = Paths.get(keyPath);
      if (path.toFile().exists()) {
        // FIX: read the raw bytes directly. The previous Reader/IOUtils approach decoded and
        // re-encoded the file with the platform default charset, which can corrupt bytes that
        // are not valid in that charset.
        return java.nio.file.Files.readAllBytes(path);
      }
    }
    return null;
  }

  /** Parses the connection string, falling back to the source's stored credentials. */
  private static ConnectionParams parse(Source source) {
    Objects.requireNonNull(source, "Source should not be null");
    ConnectionParams dto = ConnectionParamsParser.parse(getDbmsType(source), source.getSourceConnection());
    if (dto.getUser() == null) {
      dto.setUser(source.getUsername());
    }
    if (dto.getPassword() == null) {
      dto.setPassword(source.getPassword());
    }
    return dto;
  }

  /** Maps the source's OHDSI dialect name to the matching {@code DBMSType}. */
  private static DBMSType getDbmsType(Source source) {
    return Arrays.stream(DBMSType.values())
        .filter(type -> Objects.equals(type.getOhdsiDB(), source.getSourceDialect()))
        .findFirst()
        .orElseThrow(() -> new RuntimeException(String.format("Unsupported data source dialect: %s", source.getSourceDialect())));
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/StatementCancelException.java | src/main/java/org/ohdsi/webapi/util/StatementCancelException.java | package org.ohdsi.webapi.util;
/**
 * Thrown when a JDBC statement is assigned after its owning operation has already been cancelled.
 */
public class StatementCancelException extends RuntimeException {

  private static final String CANCELLED_MESSAGE = "statement cannot be set - already cancelled";

  @Override
  public String getMessage() {
    return CANCELLED_MESSAGE;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/EntityUtils.java | src/main/java/org/ohdsi/webapi/util/EntityUtils.java | package org.ohdsi.webapi.util;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraph;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraphUtils;
/**
 * Thin wrapper around {@code EntityGraphUtils} for building JPA entity graphs from attribute
 * paths.
 */
public class EntityUtils {
// Utility class: not instantiable.
private EntityUtils() {
}
/**
 * Builds an {@link EntityGraph} from the given attribute paths by delegating to
 * {@code EntityGraphUtils.fromAttributePaths}.
 *
 * @param strings the attribute paths to include in the graph
 * @return the constructed entity graph
 */
public static EntityGraph fromAttributePaths(final String... strings) {
return EntityGraphUtils.fromAttributePaths(strings);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/PreparedStatementRendererCreator.java | src/main/java/org/ohdsi/webapi/util/PreparedStatementRendererCreator.java | package org.ohdsi.webapi.util;
import org.springframework.jdbc.core.PreparedStatementCreator;
import org.springframework.jdbc.core.PreparedStatementSetter;
import org.springframework.jdbc.core.SqlProvider;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
/**
 * Adapts a {@link PreparedStatementRenderer} to Spring's {@link PreparedStatementCreator}
 * contract, exposing its SQL and ordered parameters.
 */
public class PreparedStatementRendererCreator implements PreparedStatementWithParamsCreator, PreparedStatementCreator, SqlProvider {

  private final PreparedStatementRenderer renderer;

  public PreparedStatementRendererCreator(PreparedStatementRenderer psr) {
    this.renderer = psr;
  }

  /**
   * Creates a forward-only, read-only prepared statement from the renderer's SQL and binds
   * its parameters when a setter is available.
   */
  @Override
  public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
    final PreparedStatement statement = con.prepareStatement(
        renderer.getSql(), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    final PreparedStatementSetter setter = renderer.getSetter();
    if (setter != null) {
      setter.setValues(statement);
    }
    return statement;
  }

  @Override
  public String getSql() {
    return renderer.getSql();
  }

  @Override
  public List<Object> getOrderedParamsList() {
    return renderer.getOrderedParamsList();
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/ParameterizedSqlProvider.java | src/main/java/org/ohdsi/webapi/util/ParameterizedSqlProvider.java | package org.ohdsi.webapi.util;
import org.springframework.jdbc.core.SqlProvider;
import java.util.List;
public interface ParameterizedSqlProvider extends SqlProvider {
List<Object> getOrderedParamsList();
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/SessionUtils.java | src/main/java/org/ohdsi/webapi/util/SessionUtils.java | package org.ohdsi.webapi.util;
import org.ohdsi.sql.SqlTranslate;
/**
 * Helper for creating SqlRender translation session identifiers.
 */
public final class SessionUtils {

  /**
   * Generates a fresh session id by delegating to {@code SqlTranslate.generateSessionId()}.
   *
   * @return a newly generated session id
   */
  public static final String sessionId() {
    final String generated = SqlTranslate.generateSessionId();
    return generated;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/SourceUtils.java | src/main/java/org/ohdsi/webapi/util/SourceUtils.java | package org.ohdsi.webapi.util;
import org.apache.commons.lang3.ObjectUtils;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import static org.ohdsi.webapi.source.SourceDaimon.DaimonType.CDM;
import static org.ohdsi.webapi.source.SourceDaimon.DaimonType.Results;
import static org.ohdsi.webapi.source.SourceDaimon.DaimonType.Temp;
import static org.ohdsi.webapi.source.SourceDaimon.DaimonType.Vocabulary;
/**
 * Convenience accessors for the schema qualifiers (CDM, vocabulary, results, temp) configured on
 * a {@link Source}'s daimons.
 */
public class SourceUtils {

  public static String getVocabularyQualifier(Source source) {
    return source.getTableQualifier(Vocabulary);
  }

  public static String getCdmQualifier(Source source) {
    return source.getTableQualifier(CDM);
  }

  /** Temp qualifier when configured, otherwise the supplied backup value. */
  public static String getTempQualifier(Source source, String backup) {
    String temp = source.getTableQualifierOrNull(Temp);
    return temp != null ? temp : backup;
  }

  /** Temp qualifier when configured, otherwise the (required) results qualifier. */
  public static String getTempQualifier(Source source) {
    String temp = source.getTableQualifierOrNull(Temp);
    // Resolved eagerly: the results qualifier is looked up even when a temp qualifier exists.
    String results = getResultsQualifier(source);
    return temp != null ? temp : results;
  }

  /** Temp qualifier when configured, otherwise the results qualifier, possibly null. */
  public static String getTempQualifierOrNull(Source source) {
    if (source.getTableQualifierOrNull(Temp) != null) {
      return source.getTableQualifierOrNull(Temp);
    }
    return getResultsQualifierOrNull(source);
  }

  public static String getResultsQualifier(Source source) {
    return source.getTableQualifier(Results);
  }

  public static String getResultsQualifierOrNull(Source source) {
    return source.getTableQualifierOrNull(Results);
  }

  public static String getVocabQualifierOrNull(Source source) {
    return source.getTableQualifierOrNull(Vocabulary);
  }

  /**
   * Whether the source declares the given daimon. A CDM daimon implicitly satisfies a request
   * for the Vocabulary daimon.
   */
  public static boolean hasSourceDaimon(Source source, SourceDaimon.DaimonType daimonType) {
    if (source.getDaimons().stream().anyMatch(d -> daimonType.equals(d.getDaimonType()))) {
      return true;
    }
    return Vocabulary.equals(daimonType) && hasSourceDaimon(source, CDM);
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/SecurityUtils.java | src/main/java/org/ohdsi/webapi/util/SecurityUtils.java | package org.ohdsi.webapi.util;
import com.fasterxml.jackson.core.JsonProcessingException;
import java.io.IOException;
import java.sql.SQLException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.dao.DataAccessException;
import org.springframework.transaction.TransactionException;
/**
 * Helpers used to whitelist values for static-analysis (Fortify) findings and to sanitize
 * exception messages before exposing them.
 */
public class SecurityUtils {

  public static Object whitelist(Object object) {
    return object;
  }

  public static int whitelist(int object) {
    return object;
  }

  public static String whitelist(String object) {
    return object;
  }

  /**
   * Maps well-known exception types to fixed, information-free messages; other exceptions pass
   * their own message through. The JsonProcessingException check must precede the IOException
   * check because the former is a subtype of the latter.
   */
  public static String whitelist(Exception exception) {
    final String message;
    if (exception instanceof JobInstanceAlreadyCompleteException) {
      message = "Job instance already complete exception";
    } else if (exception instanceof JsonProcessingException) {
      message = "Json processing exception";
    } else if (exception instanceof IOException) {
      message = "IO exception";
    } else if (exception instanceof TransactionException) {
      message = "Transaction exception";
    } else if (exception instanceof DataAccessException) {
      message = "Data access exception";
    } else if (exception instanceof SQLException) {
      message = "SQL exception";
    } else {
      message = exception.getMessage();
    }
    return message;
  }

  /** Sleeps for the given number of milliseconds, restoring the interrupt flag if interrupted. */
  public static void sleep(int ms) {
    try {
      Thread.sleep(ms);
    } catch (InterruptedException e) {
      // Restore the interrupt status so callers can still observe it.
      Thread.currentThread().interrupt();
    }
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/ExceptionUtils.java | src/main/java/org/ohdsi/webapi/util/ExceptionUtils.java | package org.ohdsi.webapi.util;
import java.util.Objects;
import javax.ws.rs.NotFoundException;
/**
 * Helpers for translating missing entities into JAX-RS exceptions.
 */
public class ExceptionUtils {

  /**
   * Throws a {@link NotFoundException} with the given message when {@code entity} is null;
   * otherwise does nothing.
   *
   * @param entity the value to check
   * @param message the exception message used when the value is absent
   * @throws NotFoundException when {@code entity} is null
   */
  public static void throwNotFoundExceptionIfNull(Object entity, String message) throws NotFoundException {
    if (entity == null) {
      throw new NotFoundException(message);
    }
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/ExpiringMultimap.java | src/main/java/org/ohdsi/webapi/util/ExpiringMultimap.java | /*
*
* Copyright 2018 Observational Health Data Sciences and Informatics
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Authors: Pavel Grafkin
*
*/
package org.ohdsi.webapi.util;
import org.apache.commons.collections4.map.PassiveExpiringMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * A multimap whose values expire after a fixed time-to-live. Values are kept as keys of a
 * commons-collections {@code PassiveExpiringMap}, which drops expired entries on access.
 * All operations are synchronized on this instance.
 *
 * @param <K> key type
 * @param <V> value type
 */
public class ExpiringMultimap<K, V> {

  // Time-to-live for each value, in milliseconds.
  private Integer timeToLiveMs;
  private Map<K, PassiveExpiringMap<V, Object>> map = new HashMap<>();

  public ExpiringMultimap(Integer timeToLiveMs) {
    this.timeToLiveMs = timeToLiveMs;
  }

  /**
   * Returns the not-yet-expired values for {@code key}; an empty list when none exist.
   */
  public synchronized List<V> get(K key) {
    return map.containsKey(key)
        ? map.get(key).entrySet().stream().map(Map.Entry::getKey).collect(Collectors.toList())
        : new ArrayList<>();
  }

  /**
   * Associates {@code value} with {@code key}; the value expires after the configured TTL.
   */
  public synchronized void put(K key, V value) {
    // FIX: computeIfAbsent already stores a freshly created storage map under the key, so the
    // redundant map.put(key, storage) from the original was removed.
    PassiveExpiringMap<V, Object> storage = map.computeIfAbsent(key, k -> new PassiveExpiringMap<>(timeToLiveMs));
    storage.put(value, null);
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/UserUtils.java | src/main/java/org/ohdsi/webapi/util/UserUtils.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.util;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import java.util.Objects;
/**
 * Null-safe helpers for working with {@link UserEntity} and login strings.
 */
public class UserUtils {

    /** Static utility holder — not instantiable. */
    private UserUtils() {
    }

    /**
     * Returns the user's login, or an empty string when the user is null.
     *
     * @param user possibly-null user entity
     * @return the login, never null
     */
    public static String nullSafeLogin(UserEntity user) {
        return Objects.nonNull(user) ? user.getLogin() : "";
    }

    /**
     * Null-safe lower-casing (default locale, matching existing callers' behavior).
     *
     * @param input possibly-null string
     * @return lower-cased input, or null when input is null
     */
    public static String toLowerCase(String input) {
        return Objects.nonNull(input) ? input.toLowerCase() : input;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/TempTableCleanupManager.java | src/main/java/org/ohdsi/webapi/util/TempTableCleanupManager.java | package org.ohdsi.webapi.util;
import com.google.common.collect.ImmutableSet;
import com.odysseusinc.arachne.commons.types.DBMSType;
import org.ohdsi.sql.SqlSplit;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.common.generation.GenerationUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapperResultSetExtractor;
import org.springframework.transaction.support.TransactionTemplate;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Drops leftover session-scoped temp tables for dialects that materialize
 * "temp" tables as real tables (Oracle, Impala, BigQuery, Spark, and the
 * permanent temp tables used by SQL Server / PDW cohort characterization).
 */
public class TempTableCleanupManager {

  private static final Logger LOGGER = LoggerFactory.getLogger(TempTableCleanupManager.class);

  /** JDBC metadata table types scanned when looking for leftover temp tables. */
  private static final String[] TABLE_TYPES = { "TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY" };

  // Both placeholders now use the explicit index %1$s. The original mixed
  // "%1$s" with a bare "%s", which resolved to the same argument only by
  // accident of Java's separate explicit/ordinary format-index counters.
  private static final String DROP_TABLE_STATEMENT = "IF OBJECT_ID('%1$s', 'U') IS NOT NULL DROP TABLE %1$s;\n";

  /** Dialects whose temp tables survive the session and need explicit cleanup. */
  private static final Set<String> APPLICABLE_DIALECTS = ImmutableSet.of(
          DBMSType.ORACLE.getOhdsiDB(),
          DBMSType.IMPALA.getOhdsiDB(),
          DBMSType.BIGQUERY.getOhdsiDB(),
          DBMSType.SPARK.getOhdsiDB(),
          // For Cohort Characterization which uses 'permanent temp tables'
          DBMSType.MS_SQL_SERVER.getOhdsiDB(),
          DBMSType.PDW.getOhdsiDB()
  );

  private JdbcTemplate jdbcTemplate;
  private TransactionTemplate transactionTemplate;
  private String dialect;
  private String sessionId;
  private String tempSchema;

  /**
   * @param dialect     OHDSI SqlRender dialect of the target source
   * @param sessionId   prefix used to name this session's temp tables
   * @param tempSchema  schema in which the temp tables were created
   */
  public TempTableCleanupManager(JdbcTemplate jdbcTemplate, TransactionTemplate transactionTemplate, String dialect, String sessionId, String tempSchema) {

    this.jdbcTemplate = jdbcTemplate;
    this.transactionTemplate = transactionTemplate;
    this.dialect = dialect;
    this.sessionId = sessionId;
    this.tempSchema = tempSchema;
  }

  /** @return whether the dialect requires explicit temp-table cleanup. */
  protected boolean isApplicable(String dialect) {

    return APPLICABLE_DIALECTS.contains(dialect);
  }

  /**
   * Drops all tables in {@code tempSchema} whose names start with the session id.
   * No-op (apart from logging) for dialects with true session-local temp tables.
   */
  public void cleanupTempTables() {

    LOGGER.info("Removing temp tables at {}", tempSchema);
    transactionTemplate.execute(status -> {
      // try-with-resources: the original obtained the Connection straight from
      // the DataSource and never closed it, leaking a pooled connection per call.
      try (Connection c = jdbcTemplate.getDataSource().getConnection()) {
        if (isApplicable(this.dialect)) {
          removeTempTables(c, sessionId + "%");
        }
      } catch (SQLException e) {
        LOGGER.error("Failed to cleanup temp tables", e);
        throw new RuntimeException(e);
      }
      return null;
    });
  }

  /** Finds matching tables via JDBC metadata and issues translated DROP statements. */
  private void removeTempTables(Connection c, String tablePrefix) throws SQLException {

    DatabaseMetaData metaData = c.getMetaData();
    try (ResultSet resultSet = metaData.getTables(null, tempSchema, tablePrefix, TABLE_TYPES)) {
      RowMapperResultSetExtractor<String> extractor = new RowMapperResultSetExtractor<>((rs, rowNum) -> rs.getString("TABLE_NAME"));
      List<String> tableNames = extractor.extractData(resultSet);
      String sql = tableNames.stream().map(table -> String.format(DROP_TABLE_STATEMENT, tempSchema + "." + table)).collect(Collectors.joining());
      String translatedSql = SqlTranslate.translateSql(sql, dialect);
      Arrays.asList(SqlSplit.splitSql(translatedSql)).forEach(jdbcTemplate::execute);
    }
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/ConversionUtils.java | src/main/java/org/ohdsi/webapi/util/ConversionUtils.java | package org.ohdsi.webapi.util;
import org.ohdsi.webapi.model.CommonEntity;
import org.ohdsi.webapi.model.CommonEntityExt;
import org.ohdsi.webapi.service.dto.CommonEntityDTO;
import org.ohdsi.webapi.service.dto.CommonEntityExtDTO;
import org.ohdsi.webapi.tag.dto.TagDTO;
import org.ohdsi.webapi.user.dto.UserDTO;
import org.springframework.core.convert.support.GenericConversionService;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
 * Shared helpers for copying audit metadata (and tags) from entities onto DTOs
 * via a Spring {@link GenericConversionService}.
 */
public class ConversionUtils {

    /** Static utility holder — not instantiable. */
    private ConversionUtils() {
    }

    /**
     * Copies audit metadata plus the tag set (tags and their groups, flattened
     * into a single set) from {@code source} onto {@code target}.
     */
    public static void convertMetadataExt(GenericConversionService conversionService, CommonEntityExt<? extends Number> source, CommonEntityExtDTO target) {
        ConversionUtils.convertMetadata(conversionService, source, target);
        if (Objects.nonNull(source.getTags())) {
            Set<TagDTO> tags = new HashSet<>();
            source.getTags().forEach(tag -> {
                TagDTO tagDTO = conversionService.convert(tag, TagDTO.class);
                // Tag groups are surfaced as plain tags alongside their members.
                if (Objects.nonNull(tag.getGroups())) {
                    tag.getGroups().forEach(group -> {
                        TagDTO groupDTO = conversionService.convert(group, TagDTO.class);
                        tags.add(groupDTO);
                    });
                }
                tags.add(tagDTO);
            });
            target.setTags(tags);
        }
    }

    /**
     * Copies created/modified user and date audit fields from {@code source}
     * onto {@code target}, converting users to {@link UserDTO}.
     */
    public static void convertMetadata(GenericConversionService conversionService, CommonEntity<? extends Number> source, CommonEntityDTO target) {
        target.setCreatedBy(conversionService.convert(source.getCreatedBy(), UserDTO.class));
        target.setCreatedDate(source.getCreatedDate());
        target.setModifiedBy(conversionService.convert(source.getModifiedBy(), UserDTO.class));
        target.setModifiedDate(source.getModifiedDate());
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/PreparedSqlRender.java | src/main/java/org/ohdsi/webapi/util/PreparedSqlRender.java | package org.ohdsi.webapi.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.odysseusinc.arachne.commons.types.DBMSType;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.webapi.source.Source;
/**
 * Helpers for converting SqlRender-style {@code @param} placeholders into JDBC
 * prepared-statement {@code ?} markers and for collecting the matching ordered
 * parameter values.
 */
public class PreparedSqlRender {

  /**
   * Matches "@param" and "%@param%" placeholders left by SqlRender.
   * Compiled once; the original recompiled the pattern on every call.
   */
  private static final Pattern PARAM_PATTERN = Pattern.compile("(@\\w+)|(%@\\w+%)", Pattern.UNICODE_CHARACTER_CLASS);

  /** Static utility holder — not instantiable. */
  private PreparedSqlRender() {
  }

  /** Strips line ("--") and block comments from the SQL text. */
  public static String removeSqlComments(String sql) {
    return sql.replaceAll("(--.*)", "").replaceAll("\\/\\*([\\S\\s]+?)\\*\\/", "");
  }

  /**
   * Replaces each placeholder occurrence ('%@key%', '@key', "'@key'") with the
   * text produced by {@code replacementResolver} (typically "?"). Array values
   * expand to a comma-separated run of "?" markers for IN clauses.
   */
  public static String fixPreparedStatementSql(String sql, Map<String, Object> paramValueMap, Function<Object, String> replacementResolver) {
    for (Map.Entry<String, Object> entry : paramValueMap.entrySet()) {
      Object value = entry.getValue();
      if (value instanceof String || value instanceof Integer || value instanceof Long || value == null) {
        String replacement = replacementResolver.apply(value);
        sql = sql.replace("'%@" + entry.getKey() + "%'", replacement);
        sql = sql.replace("'@" + entry.getKey() + "'", replacement);
        sql = sql.replace("@" + entry.getKey(), replacement);
      } else if (entry.getValue() instanceof Object[]) {
        int length = ((Object[]) entry.getValue()).length;
        sql = sql.replace("@" + entry.getKey(), StringUtils.repeat("?", ",", length));
      }
    }
    return sql;
  }

  /**
   * Scans {@code sql} for placeholders in textual order and returns the values
   * from {@code paramValueMap} in that same order, flattening arrays and
   * wrapping "%@param%" values in SQL LIKE wildcards.
   */
  public static List<Object> getOrderedListOfParameterValues(Map<String, Object> paramValueMap, String sql) {
    List<Object> result = new ArrayList<>();
    Matcher matcher = PARAM_PATTERN.matcher(sql);
    while (matcher.find()) {
      // The match is "@name" or "%@name%"; \w+ can never contain ')' so the
      // original's extra replace(")", "") was dead code and has been dropped.
      String param = matcher.group().replace("@", "").trim();
      if (param.contains("%")) {
        param = param.replace("%", "");
        addToList(result, "%" + paramValueMap.get(param) + "%");
      } else {
        addToList(result, paramValueMap.get(param));
      }
    }
    return result;
  }

  /** Appends a scalar value directly, or each element of an array value. */
  private static void addToList(List<Object> result, Object value) {
    if (value instanceof String || value instanceof Integer || value instanceof Long || value == null) {
      result.add(value);
    } else if (value instanceof String[]) {
      result.addAll(Arrays.asList((String[]) value));
    } else if (value instanceof Long[]) {
      result.addAll(Arrays.asList((Long[]) value));
    } else if (value instanceof Integer[]) {
      result.addAll(Arrays.asList((Integer[]) value));
    } else if (value instanceof Object[]) {
      result.addAll(Arrays.asList((Object[]) value));
    }
  }

  // Given a source, determine how many parameters are allowed for IN clauses
  // when using prepared statements. This function will return 30000 if there
  // is no known limit otherwise it will return the value based on the
  // sourceDialect property of the source object
  public static int getParameterLimit(Source source) {
    int returnVal = 30000;
    String sourceDialect = source.getSourceDialect().toLowerCase();
    if (sourceDialect.equals(DBMSType.ORACLE.getOhdsiDB())) {
      returnVal = 990;
    } else if (sourceDialect.equals(DBMSType.MS_SQL_SERVER.getOhdsiDB()) || sourceDialect.equals(DBMSType.PDW.getOhdsiDB())) {
      returnVal = 2000;
    } else if (sourceDialect.equals(DBMSType.BIGQUERY.getOhdsiDB()) || sourceDialect.equals(DBMSType.SNOWFLAKE.getOhdsiDB())) {
      returnVal = 10000;
    }
    return returnVal;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/OrderedPreparedStatementSetter.java | src/main/java/org/ohdsi/webapi/util/OrderedPreparedStatementSetter.java | package org.ohdsi.webapi.util;
import org.springframework.jdbc.core.PreparedStatementSetter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
/**
 * {@link PreparedStatementSetter} that binds a pre-ordered list of values to
 * the statement's positional parameters (1-based, in list order).
 */
public class OrderedPreparedStatementSetter implements PreparedStatementSetter {

  /** Values to bind, already in placeholder order; may be null (nothing bound). */
  private final List<Object> orderedParamsList;

  public OrderedPreparedStatementSetter(List<Object> orderedParamsList) {
    this.orderedParamsList = orderedParamsList;
  }

  /** Binds each value at its 1-based position; a null list is a no-op. */
  @Override
  public void setValues(PreparedStatement ps) throws SQLException {
    if (orderedParamsList == null) {
      return;
    }
    int position = 1;
    for (Object param : orderedParamsList) {
      ps.setObject(position++, param);
    }
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/StatementCancel.java | src/main/java/org/ohdsi/webapi/util/StatementCancel.java | package org.ohdsi.webapi.util;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Objects;
public class StatementCancel {
private Statement statement;
private boolean canceled = false;
public synchronized void setStatement(Statement statement) {
if (this.canceled) {
throw new StatementCancelException();
}
this.statement = statement;
}
public synchronized void cancel() throws SQLException {
this.canceled = true;
if (Objects.nonNull(statement)) {
statement.cancel();
}
}
public synchronized boolean isCanceled() {
return canceled;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/util/GenericExceptionMapper.java | src/main/java/org/ohdsi/webapi/util/GenericExceptionMapper.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.util;
import org.ohdsi.webapi.exception.BadRequestAtlasException;
import org.ohdsi.webapi.exception.ConceptNotExistException;
import org.ohdsi.webapi.exception.ConversionAtlasException;
import org.ohdsi.webapi.exception.UserException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.shiro.authz.UnauthorizedException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.messaging.support.ErrorMessage;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.ForbiddenException;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.UndeclaredThrowableException;
import java.util.Objects;
import org.ohdsi.webapi.vocabulary.ConceptRecommendedNotInstalledException;
/**
*
* @author fdefalco
*/
@Provider
public class GenericExceptionMapper implements ExceptionMapper<Throwable> {
private static final Logger LOGGER = LoggerFactory.getLogger(GenericExceptionMapper.class);
private final String DETAIL = "Detail: ";
@Override
public Response toResponse(Throwable ex) {
StringWriter errorStackTrace = new StringWriter();
ex.printStackTrace(new PrintWriter(errorStackTrace));
LOGGER.error(errorStackTrace.toString());
Status responseStatus;
if (ex instanceof DataIntegrityViolationException) {
responseStatus = Status.CONFLICT;
String cause = ex.getCause().getCause().getMessage();
cause = cause.substring(cause.indexOf(DETAIL) + DETAIL.length());
ex = new RuntimeException(cause);
} else if (ex instanceof UnauthorizedException || ex instanceof ForbiddenException) {
responseStatus = Status.FORBIDDEN;
} else if (ex instanceof NotFoundException) {
responseStatus = Status.NOT_FOUND;
} else if (ex instanceof BadRequestException) {
responseStatus = Status.BAD_REQUEST;
} else if (ex instanceof UndeclaredThrowableException) {
Throwable throwable = getThrowable((UndeclaredThrowableException)ex);
if (Objects.nonNull(throwable)) {
if (throwable instanceof UnauthorizedException || throwable instanceof ForbiddenException) {
responseStatus = Status.FORBIDDEN;
} else if (throwable instanceof BadRequestAtlasException || throwable instanceof ConceptNotExistException) {
responseStatus = Status.BAD_REQUEST;
ex = throwable;
} else if (throwable instanceof ConversionAtlasException) {
responseStatus = Status.BAD_REQUEST;
// New exception must be created or direct self-reference exception will be thrown
ex = new RuntimeException(throwable.getMessage());
} else {
responseStatus = Status.INTERNAL_SERVER_ERROR;
ex = new RuntimeException("An exception occurred: " + ex.getClass().getName());
}
} else {
responseStatus = Status.INTERNAL_SERVER_ERROR;
ex = new RuntimeException("An exception occurred: " + ex.getClass().getName());
}
} else if (ex instanceof UserException) {
responseStatus = Status.INTERNAL_SERVER_ERROR;
// Create new message to prevent sending error information to client
ex = new RuntimeException(ex.getMessage());
} else if (ex instanceof ConceptNotExistException) {
responseStatus = Status.BAD_REQUEST;
} else if (ex instanceof ConceptRecommendedNotInstalledException) {
responseStatus = Status.NOT_IMPLEMENTED;
} else {
responseStatus = Status.INTERNAL_SERVER_ERROR;
// Create new message to prevent sending error information to client
ex = new RuntimeException("An exception occurred: " + ex.getClass().getName());
}
// Clean stacktrace, but keep message
ex.setStackTrace(new StackTraceElement[0]);
ErrorMessage errorMessage = new ErrorMessage(ex);
return Response.status(responseStatus)
.entity(errorMessage)
.type(MediaType.APPLICATION_JSON)
.build();
}
private Throwable getThrowable(UndeclaredThrowableException ex) {
if (Objects.nonNull(ex.getUndeclaredThrowable()) && ex.getUndeclaredThrowable() instanceof InvocationTargetException) {
InvocationTargetException ite = (InvocationTargetException) ex.getUndeclaredThrowable();
return ite.getTargetException();
}
return null;
}
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/exampleapplication/ExampleApplicationWithJobService.java | src/main/java/org/ohdsi/webapi/exampleapplication/ExampleApplicationWithJobService.java | package org.ohdsi.webapi.exampleapplication;
import org.apache.commons.lang3.RandomStringUtils;
import org.ohdsi.circe.vocabulary.Concept;
import org.ohdsi.webapi.exampleapplication.model.Widget;
import org.ohdsi.webapi.exampleapplication.repository.WidgetRepository;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.job.JobTemplate;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.transaction.TransactionException;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import javax.persistence.EntityManager;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import java.util.ArrayList;
import java.util.List;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
/**
* Example REST service - will be depreciated
* in a future release
*
* @deprecated
* @summary Example
*/
@Path("/example")
public class ExampleApplicationWithJobService extends AbstractDaoService {

  /** Name of the Spring Batch job launched by {@link #queueJob()}. */
  public static final String EXAMPLE_JOB_NAME = "OhdsiExampleJob";
  /** Name of the single step inside the example job. */
  public static final String EXAMPLE_STEP_NAME = "OhdsiExampleStep";

  @Autowired
  private JobTemplate jobTemplate;
  @Autowired
  private WidgetRepository widgetRepository;
  @Autowired
  private TransactionTemplate transactionTemplate;
  @Autowired
  private EntityManager em;

  /**
   * Demo tasklet that stashes its concept list into the JobExecutionContext
   * so downstream listeners/steps can read it.
   */
  public static class ExampleApplicationTasklet implements Tasklet {

    private static final Logger log = LoggerFactory.getLogger(ExampleApplicationTasklet.class);

    private final List<Concept> concepts;

    public ExampleApplicationTasklet(final List<Concept> concepts) {
      this.concepts = concepts;
    }

    @Override
    public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
      // set contextual data in JobExecutionContext
      chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext()
          .put("concepts", this.concepts);
      log.info("Tasklet execution >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
      return RepeatStatus.FINISHED;
    }
  }

  /**
   * Example REST service - will be depreciated
   * in a future release
   *
   * @deprecated
   * @summary DO NOT USE
   */
  @POST
  @Produces(MediaType.APPLICATION_JSON)
  public JobExecutionResource queueJob() throws Exception {
    //Allow unique combinations of JobParameters to run in parallel. An empty JobParameters() would only allow a JobInstance to run at a time.
    final JobParameters jobParameters = new JobParametersBuilder().addString("param", "parameter with 250 char limit")
        .addLong("time", System.currentTimeMillis()).toJobParameters();
    final List<Concept> concepts = new ArrayList<>();
    final Concept c1 = new Concept();
    c1.conceptName = "c1";
    final Concept c2 = new Concept();
    c2.conceptName = "c2";
    concepts.add(c1);
    concepts.add(c2);
    return this.jobTemplate.launchTasklet(EXAMPLE_JOB_NAME, EXAMPLE_STEP_NAME, new ExampleApplicationTasklet(concepts),
        jobParameters);
  }

  /**
   * Example REST service - will be depreciated
   * in a future release
   *
   * @deprecated
   * @summary DO NOT USE
   */
  @GET
  @Path("widget")
  @Produces(MediaType.APPLICATION_JSON)
  public List<Widget> findAllWidgets() {
    Page<Widget> page = this.widgetRepository.findAll(new PageRequest(0, 10));
    return page.getContent();
  }

  /**
   * Example REST service - will be depreciated
   * in a future release
   *
   * @deprecated
   * @summary DO NOT USE
   */
  @POST
  @Path("widget")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces(MediaType.APPLICATION_JSON)
  //Wrapping in transaction (e.g. TransactionTemplate) not necessary as SimpleJpaRepository.save is annotated with @Transactional.
  public Widget createWidget(Widget w) {
    return this.widgetRepository.save(w);
  }

  /** Builds 20 widgets with random 10-char alphanumeric names. */
  private List<Widget> createWidgets() {
    List<Widget> widgets = new ArrayList<>();
    for (int x = 0; x < 20; x++) {
      Widget w = new Widget();
      w.setName(RandomStringUtils.randomAlphanumeric(10));
      widgets.add(w);
    }
    return widgets;
  }

  /**
   * Example REST service - will be depreciated
   * in a future release
   *
   * @deprecated
   * @summary DO NOT USE
   */
  @POST
  @Path("widgets/batch")
  public void batchWriteWidgets() {
    final List<Widget> widgets = createWidgets();
    // Lambda replaces the original anonymous TransactionCallback<Void>.
    this.transactionTemplate.execute(status -> {
      int i = 0;
      for (Widget w : widgets) {
        em.persist(w);
        if (i % 5 == 0) { //5, same as the JDBC batch size
          //flush a batch of inserts and release memory:
          log.info("Flushing, clearing");
          em.flush();
          em.clear();
        }
        i++;
      }
      return null;
    });
    log.info("Persisted {} widgets", widgets.size());
  }

  /**
   * Example REST service - will be depreciated
   * in a future release
   *
   * @deprecated
   * @summary DO NOT USE
   */
  @POST
  @Path("widgets")
  public void writeWidgets() {
    final List<Widget> widgets = createWidgets();
    this.widgetRepository.save(widgets);
    log.info("Persisted {} widgets", widgets.size());
  }

  /**
   * Example REST service - will be depreciated
   * in a future release
   *
   * @deprecated
   * @param w DO NOT USE
   * @summary DO NOT USE
   */
  @POST
  @Path("widget2")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces(MediaType.APPLICATION_JSON)
  //@Transactional do not work with JAX-RS default config. Review caveots with @Transactional usage (proxy requirements).
  //Note that SimpleJpaRepository.save is annotated with @Transactional and will use default (e.g. Propagations.REQUIRES). Illustration of deviating from default propagation.
  public Widget createWidgetWith(final Widget w) {
    try {
      // Lambda replaces the original anonymous TransactionCallback<Widget>.
      return getTransactionTemplateRequiresNew().execute(status -> widgetRepository.save(w));
    } catch (final TransactionException e) {
      log.error(whitelist(e));
      throw e;
    }
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/exampleapplication/ExampleApplicationConfig.java | src/main/java/org/ohdsi/webapi/exampleapplication/ExampleApplicationConfig.java | package org.ohdsi.webapi.exampleapplication;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Configuration;
/**
*
*/
@Configuration
public class ExampleApplicationConfig {

    // NOTE(review): this logger is currently unused; kept for parity with the example.
    private static final Logger log = LoggerFactory.getLogger(ExampleApplicationConfig.class);

    // Placeholder: example @Bean definitions for the demo application would go here.
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/exampleapplication/model/Widget.java | src/main/java/org/ohdsi/webapi/exampleapplication/model/Widget.java | package org.ohdsi.webapi.exampleapplication.model;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
/**
*
*/
/**
 * JPA entity backing the example application's EXAMPLEAPP_WIDGET table.
 */
@Entity(name = "EXAMPLEAPP_WIDGET")
public class Widget implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Surrogate primary key, assigned by a sequence-style generator. */
    @Id
    @GenericGenerator(
            name = "exampleapp_widget_generator",
            strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
            parameters = {
                    @Parameter(name = "increment_size", value = "1")
            }
    )
    @GeneratedValue(generator = "exampleapp_widget_generator")
    private Long id;

    /** Widget display name; required at the database level. */
    @Column(nullable = false)
    private String name;

    /** @return the surrogate id */
    public Long getId() {
        return id;
    }

    /** @param id the surrogate id to set */
    public void setId(final Long id) {
        this.id = id;
    }

    /** @return the widget name */
    public String getName() {
        return name;
    }

    /** @param name the widget name to set */
    public void setName(final String name) {
        this.name = name;
    }

    @Override
    public String toString() {
        return String.format("Widget Id=%s Name=%s", getId(), getName());
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/exampleapplication/repository/WidgetRepository.java | src/main/java/org/ohdsi/webapi/exampleapplication/repository/WidgetRepository.java | package org.ohdsi.webapi.exampleapplication.repository;
import org.ohdsi.webapi.exampleapplication.model.Widget;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
/**
*
*/
@Repository
public interface WidgetRepository extends CrudRepository<Widget, Long> {

    /** Pages through all widgets. */
    Page<Widget> findAll(Pageable pageable);

    // NOTE(review): this derived query names one property ("name", containing,
    // ignore-case) but declares TWO String parameters — Spring Data derives
    // bindings from the method name, so the extra "country" argument looks
    // inconsistent; confirm whether this method is used and what it should do.
    Page<Widget> findByNameContainingIgnoreCase(String name, String country, Pageable pageable);

    /** Case-insensitive exact-name lookup; presumably returns null when absent — TODO confirm. */
    Widget findByNameIgnoreCase(String name);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/PersonService.java | src/main/java/org/ohdsi/webapi/service/PersonService.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.service;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Comparator;
import java.util.Objects;
import java.util.Optional;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import org.apache.shiro.SecurityUtils;
import org.ohdsi.webapi.person.ObservationPeriod;
import org.ohdsi.webapi.person.PersonRecord;
import org.ohdsi.webapi.person.CohortPerson;
import org.ohdsi.webapi.person.PersonProfile;
import org.ohdsi.webapi.shiro.management.Security;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
@Path("{sourceKey}/person/")
@Component
public class PersonService extends AbstractDaoService {
@Value("${person.viewDates}")
private Boolean viewDatesPermitted;
@Value("#{!'${security.provider}'.equals('DisabledSecurity')}")
private boolean securityEnabled;
/**
 * Get the complete medical history for a single person in a single database
 * @summary Get complete patient profile
 * @param sourceKey Data source to extract from
 * @param personId Person whose profile to extract (parsed as a numeric id downstream)
 * @param cohortId (optional) Cohort used to adjust start and end dates. If the person is a member of the cohort then
 * start and end dates will be adjusted so they are relative to the cohort start date.
 * @return All records in a patient profile with start and end days relative to cohort start date or initial date of
 * observation
 */
@Path("{personId}")
@GET
@Produces(MediaType.APPLICATION_JSON)
public PersonProfile getPersonProfile(@PathParam("sourceKey") String sourceKey, @PathParam("personId") String personId,
                                      @DefaultValue("0") @QueryParam("cohort") Long cohortId)
{
  final PersonProfile profile = new PersonProfile();
  // Whether raw dates may be returned; when false, dates are nulled out below
  // and only relative day offsets are exposed.
  boolean showDates = this.canViewDates();
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  // Sentinel values; overwritten by the person-info query if the person exists.
  profile.gender = "not found";
  profile.yearOfBirth = 0;
  // 1) Basic demographics (gender, year of birth).
  PreparedStatementRenderer psrPersonInfo = preparePersonInfoSql(personId, source);
  getSourceJdbcTemplate(source).query(psrPersonInfo.getSql(), psrPersonInfo.getSetter(), new RowMapper<Void>() {
    @Override
    public Void mapRow(ResultSet resultSet, int arg1) throws SQLException {
      profile.yearOfBirth = resultSet.getInt("year_of_birth");
      profile.gender = resultSet.getString("gender");
      return null;
    }
  });
  // Sentinel still present => no row came back for this person id.
  if (profile.gender.equals("not found")) {
    throw new RuntimeException("Can't find person " + personId);
  }
  // get observation periods
  PreparedStatementRenderer psrObservationPeriods = prepareObservationPeriodsSql(personId, source);
  getSourceJdbcTemplate(source).query(psrObservationPeriods.getSql(), psrObservationPeriods.getSetter(), new RowMapper<Void>() {
    @Override
    public Void mapRow(ResultSet resultSet, int arg1) throws SQLException {
      ObservationPeriod op = new ObservationPeriod();
      op.startDate = resultSet.getTimestamp("start_date");
      op.endDate = resultSet.getTimestamp("end_date");
      op.type = resultSet.getString("observation_period_type");
      op.id = resultSet.getLong("observation_period_id");
      profile.observationPeriods.add(op);
      return null;
    }
  });
  // get simplified records
  PreparedStatementRenderer psrPersonProfile = prepareGetPersonProfile(personId, source);
  getSourceJdbcTemplate(source).query(psrPersonProfile.getSql(), psrPersonProfile.getSetter(), new RowMapper<Void>() {
    @Override
    public Void mapRow(ResultSet resultSet, int arg1) throws SQLException {
      PersonRecord item = new PersonRecord();
      item.conceptId = resultSet.getLong("concept_id");
      item.conceptName = resultSet.getString("concept_name");
      item.domain = resultSet.getString("domain");
      item.startDate = resultSet.getTimestamp("start_date");
      item.endDate = resultSet.getTimestamp("end_date");
      profile.records.add(item);
      return null;
    }
  });
  // 4) Cohort memberships for this person.
  PreparedStatementRenderer psrGetCohorts = prepareGetCohortsSql(personId, source);
  getSourceJdbcTemplate(source).query(psrGetCohorts.getSql(), psrGetCohorts.getSetter(), new RowMapper<Void>() {
    @Override
    public Void mapRow(ResultSet resultSet, int arg1) throws SQLException {
      CohortPerson item = new CohortPerson();
      item.startDate = resultSet.getTimestamp("cohort_start_date");
      item.endDate = resultSet.getTimestamp("cohort_end_date");
      item.cohortDefinitionId = resultSet.getLong("cohort_definition_id");
      profile.cohorts.add(item);
      return null;
    }
  });
  // Index date: the requested cohort's start date when the person is a member,
  // otherwise the earliest record start date.
  LocalDateTime cohortStartDate = null;
  Optional<CohortPerson> cohort = cohortId > 0 ? profile.cohorts.stream().filter(c -> c.cohortDefinitionId.equals(cohortId)).findFirst() :
          Optional.empty();
  cohortStartDate = cohort.map(c -> c.startDate.toLocalDateTime()).orElseGet(() ->
          profile.records.stream().min(Comparator.comparing(c -> c.startDate))
                  .map(r -> r.startDate.toLocalDateTime()).orElse(null));
  // NOTE(review): cohortStartDate can still be null here (no matching cohort AND
  // no records); the ChronoUnit.DAYS.between calls below would then throw an NPE
  // if observation periods exist — confirm whether that state is reachable.
  if (cohortStartDate != null && profile.yearOfBirth > 0) {
    profile.ageAtIndex = cohortStartDate.getYear() - profile.yearOfBirth;
  }
  // Convert each record's dates to day offsets relative to the index date;
  // a missing end date falls back to the start day.
  for(PersonRecord record : profile.records){
    record.startDay = Math.toIntExact(ChronoUnit.DAYS.between(cohortStartDate, record.startDate.toLocalDateTime()));
    record.endDay = Objects.nonNull(record.endDate) ? Math.toIntExact(ChronoUnit.DAYS.between(cohortStartDate,
            record.endDate.toLocalDateTime())) : record.startDay;
    if (!showDates) {
      record.startDate = null;
      record.endDate = null;
    }
  }
  // Same relative-day conversion for observation periods (x1/x2 are plot bounds).
  for(ObservationPeriod period : profile.observationPeriods){
    period.x1 = Math.toIntExact(ChronoUnit.DAYS.between(cohortStartDate,
            period.startDate.toLocalDateTime()));
    period.x2 = Math.toIntExact(ChronoUnit.DAYS.between(cohortStartDate,
            period.endDate.toLocalDateTime()));
    if (!showDates) {
      period.startDate = null;
      period.endDate = null;
    }
  }
  return profile;
}
/**
 * Builds the parameterized statement that loads a person's observation periods
 * from the CDM schema of the given source.
 *
 * @param personId person identifier as a string; parsed to a Long parameter
 * @param source   the CDM source to query
 * @return renderer holding the translated SQL and its bound parameters
 */
protected PreparedStatementRenderer prepareObservationPeriodsSql(String personId, Source source) {
  String cdmSchema = source.getTableQualifier(SourceDaimon.DaimonType.CDM);
  return new PreparedStatementRenderer(source, "/resources/person/sql/getObservationPeriods.sql",
      "tableQualifier", cdmSchema, "personId", Long.valueOf(personId));
}
/**
 * Builds the parameterized statement that loads the cohorts a person belongs to
 * from the results schema of the given source.
 *
 * @param personId person identifier as a string; bound as the {@code subjectId} parameter
 * @param source   the source whose results schema is queried
 * @return renderer holding the translated SQL and its bound parameters
 */
protected PreparedStatementRenderer prepareGetCohortsSql(String personId, Source source) {
  String resultsSchema = source.getTableQualifier(SourceDaimon.DaimonType.Results);
  return new PreparedStatementRenderer(source, "/resources/person/sql/getCohorts.sql",
      "tableQualifier", resultsSchema, "subjectId", Long.valueOf(personId));
}
/**
 * Builds the parameterized statement that loads basic person info
 * (year of birth, gender) from the CDM schema of the given source.
 *
 * @param personId person identifier as a string; parsed to a Long parameter
 * @param source   the CDM source to query
 * @return renderer holding the translated SQL and its bound parameters
 */
protected PreparedStatementRenderer preparePersonInfoSql(String personId, Source source) {
String tableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);
return new PreparedStatementRenderer(source, "/resources/person/sql/personInfo.sql", "tableQualifier", tableQualifier, "personId", Long.valueOf(personId));
}
/**
 * Builds the parameterized statement that loads a person's simplified clinical
 * records (concept, domain, start/end dates) from the CDM schema.
 *
 * @param personId person identifier as a string; parsed to a Long parameter
 * @param source   the CDM source to query
 * @return renderer holding the translated SQL and its bound parameters
 */
protected PreparedStatementRenderer prepareGetPersonProfile(String personId, Source source) {
String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.CDM);
return new PreparedStatementRenderer(source, "/resources/person/sql/getRecords.sql", "tableQualifier", tqValue, "personId", Long.valueOf(personId));
}
/**
 * Decides whether the caller may see raw dates in a person profile.
 * When security is enabled, the configuration flag only opts the feature in;
 * the subject must additionally hold the explicit profile-dates permission.
 * With security disabled, the configuration flag alone decides.
 */
private Boolean canViewDates() {
if (this.viewDatesPermitted && this.securityEnabled) {
return SecurityUtils.getSubject().isPermitted(Security.PROFILE_VIEW_DATES_PERMISSION);
} else {
return this.viewDatesPermitted;
}
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/UserService.java | src/main/java/org/ohdsi/webapi/service/UserService.java | package org.ohdsi.webapi.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.odysseusinc.logging.event.*;
import org.eclipse.collections.impl.block.factory.Comparators;
import org.ohdsi.webapi.shiro.Entities.PermissionEntity;
import org.ohdsi.webapi.shiro.Entities.RoleEntity;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.PermissionManager;
import org.ohdsi.webapi.user.Role;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.stereotype.Component;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
 * REST services for administering users, roles and permissions.
 *
 * @author gennadiy.anisimov
 */
@Path("/")
@Component
public class UserService {

  @Autowired
  private PermissionManager authorizer;

  @Autowired
  private ApplicationEventPublisher eventPublisher;

  // NOTE(review): this SpEL concatenates the property value with an empty list;
  // the usual "default value" form is ${prop:#{...}} -- confirm intent.
  @Value("${security.ad.default.import.group}#{T(java.util.Collections).emptyList()}")
  private List<String> defaultRoles;

  // Permission templates granted to a role's creator; %s is replaced by the role id.
  private Map<String, String> roleCreatorPermissionsTemplate = new LinkedHashMap<>();

  public UserService() {
    this.roleCreatorPermissionsTemplate.put("role:%s:permissions:*:put", "Add permissions to role with ID = %s");
    this.roleCreatorPermissionsTemplate.put("role:%s:permissions:*:delete", "Remove permissions from role with ID = %s");
    this.roleCreatorPermissionsTemplate.put("role:%s:put", "Update role with ID = %s");
    this.roleCreatorPermissionsTemplate.put("role:%s:delete", "Delete role with ID = %s");
  }

  /** Lightweight user DTO, ordered by id with a fallback to login. */
  public static class User implements Comparable<User> {
    public Long id;
    public String login;
    public String name;
    public List<Permission> permissions;
    public Map<String, List<String>> permissionIdx;

    public User() {}

    public User(UserEntity userEntity) {
      this.id = userEntity.getId();
      this.login = userEntity.getLogin();
      this.name = userEntity.getName();
    }

    @Override
    public int compareTo(User o) {
      // Typed, null-safe comparison. The previous implementation used a raw
      // Comparator (unchecked call) and threw NPE when exactly one id was null;
      // nullsFirst orders such entries deterministically instead.
      if (this.id == null && o.id == null) {
        return Objects.compare(this.login, o.login, Comparator.nullsFirst(Comparator.<String>naturalOrder()));
      }
      return Objects.compare(this.id, o.id, Comparator.nullsFirst(Comparator.<Long>naturalOrder()));
    }
  }

  /** Lightweight permission DTO, ordered by id with a fallback to the permission string. */
  public static class Permission implements Comparable<Permission> {
    public Long id;
    public String permission;
    public String description;

    public Permission() {}

    public Permission(PermissionEntity permissionEntity) {
      this.id = permissionEntity.getId();
      this.permission = permissionEntity.getValue();
      this.description = permissionEntity.getDescription();
    }

    @Override
    public int compareTo(Permission o) {
      // Same null-safe scheme as User.compareTo (see note there).
      if (this.id == null && o.id == null) {
        return Objects.compare(this.permission, o.permission, Comparator.nullsFirst(Comparator.<String>naturalOrder()));
      }
      return Objects.compare(this.id, o.id, Comparator.nullsFirst(Comparator.<Long>naturalOrder()));
    }
  }

  /** Lists all registered users. */
  @GET
  @Path("user")
  @Produces(MediaType.APPLICATION_JSON)
  public ArrayList<User> getUsers() {
    Iterable<UserEntity> userEntities = this.authorizer.getUsers();
    ArrayList<User> users = convertUsers(userEntities);
    return users;
  }

  /**
   * Returns the authenticated user together with their permissions and a
   * permission index keyed by permission pattern.
   */
  @GET
  @Path("user/me")
  @Produces(MediaType.APPLICATION_JSON)
  public User getCurrentUser() throws Exception {
    UserEntity currentUser = this.authorizer.getCurrentUser();
    Iterable<PermissionEntity> permissions = this.authorizer.getUserPermissions(currentUser.getId());
    User user = new User();
    user.id = currentUser.getId();
    user.login = currentUser.getLogin();
    user.name = currentUser.getName();
    user.permissions = convertPermissions(permissions);
    user.permissionIdx = authorizer.queryUserPermissions(currentUser.getLogin()).permissions;
    return user;
  }

  /** Lists the permissions of the given user, sorted. */
  @GET
  @Path("user/{userId}/permissions")
  @Produces(MediaType.APPLICATION_JSON)
  public List<Permission> getUsersPermissions(@PathParam("userId") Long userId) throws Exception {
    Set<PermissionEntity> permissionEntities = this.authorizer.getUserPermissions(userId);
    List<Permission> permissions = convertPermissions(permissionEntities);
    Collections.sort(permissions);
    return permissions;
  }

  /** Lists the roles of the given user, sorted. */
  @GET
  @Path("user/{userId}/roles")
  @Produces(MediaType.APPLICATION_JSON)
  public ArrayList<Role> getUserRoles(@PathParam("userId") Long userId) throws Exception {
    Set<RoleEntity> roleEntities = this.authorizer.getUserRoles(userId);
    ArrayList<Role> roles = convertRoles(roleEntities);
    Collections.sort(roles);
    return roles;
  }

  /**
   * Creates a role and grants the creator's personal role the management
   * permissions (update/delete/manage-permissions) for it.
   */
  @POST
  @Path("role")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces(MediaType.APPLICATION_JSON)
  public Role createRole(Role role) throws Exception {
    RoleEntity roleEntity = this.authorizer.addRole(role.role, true);
    RoleEntity personalRole = this.authorizer.getCurrentUserPersonalRole();
    this.authorizer.addPermissionsFromTemplate(
            personalRole,
            this.roleCreatorPermissionsTemplate,
            String.valueOf(roleEntity.getId()));
    Role newRole = new Role(roleEntity);
    eventPublisher.publishEvent(new AddRoleEvent(this, newRole.id, newRole.role));
    return newRole;
  }

  /** Renames an existing role; fails if the role does not exist. */
  @PUT
  @Path("role/{roleId}")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces(MediaType.APPLICATION_JSON)
  public Role updateRole(@PathParam("roleId") Long id, Role role) throws Exception {
    RoleEntity roleEntity = this.authorizer.getRole(id);
    if (roleEntity == null) {
      throw new Exception("Role doesn't exist");
    }
    roleEntity.setName(role.role);
    roleEntity = this.authorizer.updateRole(roleEntity);
    eventPublisher.publishEvent(new ChangeRoleEvent(this, id, role.role));
    return new Role(roleEntity);
  }

  /** Lists roles; personal (per-user) roles are included only on request. */
  @GET
  @Path("role")
  @Produces(MediaType.APPLICATION_JSON)
  public ArrayList<Role> getRoles(
          @DefaultValue("false") @QueryParam("include_personal") boolean includePersonalRoles) {
    Iterable<RoleEntity> roleEntities = this.authorizer.getRoles(includePersonalRoles);
    ArrayList<Role> roles = convertRoles(roleEntities);
    return roles;
  }

  /** Returns a single role by id. */
  @GET
  @Path("role/{roleId}")
  @Produces(MediaType.APPLICATION_JSON)
  public Role getRole(@PathParam("roleId") Long id) {
    RoleEntity roleEntity = this.authorizer.getRole(id);
    Role role = new Role(roleEntity);
    return role;
  }

  /** Deletes a role and revokes the creator-management permissions derived from it. */
  @DELETE
  @Path("role/{roleId}")
  public void removeRole(@PathParam("roleId") Long roleId) {
    this.authorizer.removeRole(roleId);
    this.authorizer.removePermissionsFromTemplate(this.roleCreatorPermissionsTemplate, String.valueOf(roleId));
  }

  /** Lists the permissions attached to a role, sorted. */
  @GET
  @Path("role/{roleId}/permissions")
  @Produces(MediaType.APPLICATION_JSON)
  public List<Permission> getRolePermissions(@PathParam("roleId") Long roleId) throws Exception {
    Set<PermissionEntity> permissionEntities = this.authorizer.getRolePermissions(roleId);
    List<Permission> permissions = convertPermissions(permissionEntities);
    Collections.sort(permissions);
    return permissions;
  }

  /** Adds each '+'-separated permission id to the role, publishing an event per grant. */
  @PUT
  @Path("role/{roleId}/permissions/{permissionIdList}")
  public void addPermissionToRole(@PathParam("roleId") Long roleId, @PathParam("permissionIdList") String permissionIdList) throws Exception {
    String[] ids = permissionIdList.split("\\+");
    for (String permissionIdString : ids) {
      Long permissionId = Long.parseLong(permissionIdString);
      this.authorizer.addPermission(roleId, permissionId);
      eventPublisher.publishEvent(new AddPermissionEvent(this, permissionId, roleId));
    }
  }

  /** Removes each '+'-separated permission id from the role, publishing an event per revoke. */
  @DELETE
  @Path("role/{roleId}/permissions/{permissionIdList}")
  public void removePermissionFromRole(@PathParam("roleId") Long roleId, @PathParam("permissionIdList") String permissionIdList) {
    String[] ids = permissionIdList.split("\\+");
    for (String permissionIdString : ids) {
      Long permissionId = Long.parseLong(permissionIdString);
      this.authorizer.removePermission(permissionId, roleId);
      eventPublisher.publishEvent(new DeletePermissionEvent(this, permissionId, roleId));
    }
  }

  /** Lists the users holding a role, sorted. */
  @GET
  @Path("role/{roleId}/users")
  @Produces(MediaType.APPLICATION_JSON)
  public ArrayList<User> getRoleUsers(@PathParam("roleId") Long roleId) throws Exception {
    Set<UserEntity> userEntities = this.authorizer.getRoleUsers(roleId);
    ArrayList<User> users = this.convertUsers(userEntities);
    Collections.sort(users);
    return users;
  }

  /** Assigns the role to each '+'-separated user id, publishing an event per assignment. */
  @PUT
  @Path("role/{roleId}/users/{userIdList}")
  public void addUserToRole(@PathParam("roleId") Long roleId, @PathParam("userIdList") String userIdList) throws Exception {
    String[] ids = userIdList.split("\\+");
    for (String userIdString : ids) {
      Long userId = Long.parseLong(userIdString);
      this.authorizer.addUser(userId, roleId);
      eventPublisher.publishEvent(new AssignRoleEvent(this, roleId, userId));
    }
  }

  /** Unassigns the role from each '+'-separated user id, publishing an event per removal. */
  @DELETE
  @Path("role/{roleId}/users/{userIdList}")
  public void removeUserFromRole(@PathParam("roleId") Long roleId, @PathParam("userIdList") String userIdList) {
    String[] ids = userIdList.split("\\+");
    for (String userIdString : ids) {
      Long userId = Long.parseLong(userIdString);
      this.authorizer.removeUser(userId, roleId);
      eventPublisher.publishEvent(new UnassignRoleEvent(this, roleId, userId));
    }
  }

  /** Converts permission entities to DTOs, preserving iteration order. */
  private List<Permission> convertPermissions(final Iterable<PermissionEntity> permissionEntities) {
    return StreamSupport.stream(permissionEntities.spliterator(), false)
            .map(UserService.Permission::new)
            .collect(Collectors.toList());
  }

  /** Converts role entities to DTOs, flagging those listed as default AD import roles. */
  private ArrayList<Role> convertRoles(final Iterable<RoleEntity> roleEntities) {
    ArrayList<Role> roles = new ArrayList<>();
    for (RoleEntity roleEntity : roleEntities) {
      Role role = new Role(roleEntity, defaultRoles.contains(roleEntity.getName()));
      roles.add(role);
    }
    return roles;
  }

  /** Converts user entities to DTOs, preserving iteration order. */
  private ArrayList<User> convertUsers(final Iterable<UserEntity> userEntities) {
    ArrayList<User> users = new ArrayList<>();
    for (UserEntity userEntity : userEntities) {
      User user = new User(userEntity);
      users.add(user);
    }
    return users;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/CohortService.java | src/main/java/org/ohdsi/webapi/service/CohortService.java | package org.ohdsi.webapi.service;
import java.util.List;
import javax.persistence.EntityManager;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.ohdsi.webapi.cohort.CohortEntity;
import org.ohdsi.webapi.cohort.CohortRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
/**
 * REST service that reads from and writes to the COHORT table.
 */
@Path("/cohort/")
@Component
public class CohortService {

  @Autowired
  public CohortRepository cohortRepository;

  @Autowired
  private TransactionTemplate transactionTemplate;

  @Autowired
  private EntityManager em;

  /**
   * Fetches every cohort row stored under the supplied cohort definition id.
   *
   * @param id Cohort Definition id
   * @return List of CohortEntity
   */
  @GET
  @Path("{id}")
  @Produces(MediaType.APPLICATION_JSON)
  public List<CohortEntity> getCohortListById(@PathParam("id") final long id) {
    return this.cohortRepository.getAllCohortsForId(id);
  }

  /**
   * Persists the given cohort rows into the COHORT table inside a single
   * transaction, flushing and clearing the persistence context periodically
   * to bound memory use.
   *
   * @param cohort List of CohortEntity
   * @return status ("ok" on success)
   */
  @POST
  @Path("import")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces(MediaType.TEXT_PLAIN)
  public String saveCohortListToCDM(final List<CohortEntity> cohort) {
    this.transactionTemplate.execute((TransactionCallback<Void>) status -> {
      int index = 0;
      for (CohortEntity entity : cohort) {
        em.persist(entity);
        // Flush/clear on the JDBC batch-size boundary (5) to release memory.
        if (index % 5 == 0) {
          em.flush();
          em.clear();
        }
        index++;
      }
      return null;
    });
    return "ok";
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java | src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.service;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.commonmark.Extension;
import org.commonmark.ext.gfm.tables.TablesExtension;
import org.commonmark.node.*;
import org.commonmark.parser.Parser;
import org.commonmark.renderer.html.HtmlRenderer;
import org.ohdsi.analysis.Utils;
import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType;
import org.ohdsi.circe.check.Checker;
import org.ohdsi.circe.cohortdefinition.CohortExpression;
import org.ohdsi.circe.cohortdefinition.CohortExpressionQueryBuilder;
import org.ohdsi.circe.cohortdefinition.ConceptSet;
import org.ohdsi.circe.cohortdefinition.printfriendly.MarkdownRender;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.featureExtraction.FeatureExtraction;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.check.CheckResult;
import org.ohdsi.webapi.check.checker.cohort.CohortChecker;
import org.ohdsi.webapi.check.warning.Warning;
import org.ohdsi.webapi.check.warning.WarningUtils;
import org.ohdsi.webapi.cohortcharacterization.dto.CcDistributionStat;
import org.ohdsi.webapi.cohortcharacterization.dto.CcPrevalenceStat;
import org.ohdsi.webapi.cohortcharacterization.dto.CcResult;
import org.ohdsi.webapi.cohortcharacterization.dto.ExecutionResultRequest;
import org.ohdsi.webapi.cohortcharacterization.report.AnalysisItem;
import org.ohdsi.webapi.cohortcharacterization.report.AnalysisResultItem;
import org.ohdsi.webapi.cohortcharacterization.report.Report;
import org.ohdsi.webapi.cohortdefinition.CleanupCohortTasklet;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetails;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfo;
import org.ohdsi.webapi.cohortdefinition.InclusionRuleReport;
import org.ohdsi.webapi.cohortdefinition.dto.CohortDTO;
import org.ohdsi.webapi.cohortdefinition.dto.CohortGenerationInfoDTO;
import org.ohdsi.webapi.cohortdefinition.dto.CohortMetadataDTO;
import org.ohdsi.webapi.cohortdefinition.dto.CohortMetadataImplDTO;
import org.ohdsi.webapi.cohortdefinition.dto.CohortVersionFullDTO;
import org.ohdsi.webapi.cohortsample.CleanupCohortSamplesTasklet;
import org.ohdsi.webapi.cohortsample.CohortSamplingService;
import org.ohdsi.webapi.cohortdefinition.dto.CohortRawDTO;
import org.ohdsi.webapi.cohortdefinition.event.CohortDefinitionChangedEvent;
import org.ohdsi.webapi.common.SourceMapKey;
import org.ohdsi.webapi.common.generation.GenerateSqlResult;
import org.ohdsi.webapi.common.sensitiveinfo.CohortGenerationSensitiveInfoService;
import org.ohdsi.webapi.conceptset.ConceptSetExport;
import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity;
import org.ohdsi.webapi.feanalysis.repository.FeAnalysisEntityRepository;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.job.JobTemplate;
import org.ohdsi.webapi.security.PermissionService;
import org.ohdsi.webapi.service.dto.CheckResultDTO;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.management.datasource.SourceIdAccessor;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.source.SourceInfo;
import org.ohdsi.webapi.tag.domain.HasTags;
import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO;
import org.ohdsi.webapi.util.*;
import org.ohdsi.webapi.versioning.domain.CohortVersion;
import org.ohdsi.webapi.versioning.domain.Version;
import org.ohdsi.webapi.versioning.domain.VersionBase;
import org.ohdsi.webapi.versioning.domain.VersionType;
import org.ohdsi.webapi.versioning.dto.VersionDTO;
import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO;
import org.ohdsi.webapi.versioning.service.VersionService;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.core.convert.ConversionService;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.servlet.ServletContext;
import javax.transaction.Transactional;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.ForbiddenException;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.ByteArrayOutputStream;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.cache.CacheManager;
import javax.cache.configuration.MutableConfiguration;
import javax.ws.rs.core.Response.ResponseBuilder;
import static org.ohdsi.analysis.cohortcharacterization.design.CcResultType.DISTRIBUTION;
import static org.ohdsi.analysis.cohortcharacterization.design.CcResultType.PREVALENCE;
import static org.ohdsi.webapi.Constants.Params.COHORT_DEFINITION_ID;
import static org.ohdsi.webapi.Constants.Params.JOB_NAME;
import static org.ohdsi.webapi.Constants.Params.SOURCE_ID;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.sqlrender.SourceAwareSqlRender;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
import org.springframework.boot.autoconfigure.cache.JCacheManagerCustomizer;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
/**
* Provides REST services for working with cohort definitions.
*
* @summary Provides REST services for working with cohort definitions.
* @author cknoll1
*/
@Path("/cohortdefinition")
@Component
public class CohortDefinitionService extends AbstractDaoService implements HasTags<Integer> {
// Registers the cohort-definition list cache with the JCache manager at startup.
@Component
public static class CachingSetup implements JCacheManagerCustomizer {

// Cache key: user/context string; value: cached cohort definition list.
public static final String COHORT_DEFINITION_LIST_CACHE = "cohortDefinitionList";

@Override
public void customize(CacheManager cacheManager) {
// Evict when a cohort definition is created or updated, or permissions, or tags
// change (eviction is wired elsewhere via @CacheEvict); create-once guard below.
if (!CacheHelper.getCacheNames(cacheManager).contains(COHORT_DEFINITION_LIST_CACHE)) {
// The double cast is needed because JCache configuration cannot express
// the generic List element type directly.
cacheManager.createCache(COHORT_DEFINITION_LIST_CACHE, new MutableConfiguration<String, List<CohortMetadataDTO>>()
.setTypes(String.class, (Class<List<CohortMetadataDTO>>) (Class<?>) List.class)
.setStoreByValue(false)
.setStatisticsEnabled(true));
}
}
}
// Shared, stateless builder used to render cohort expressions into SQL.
private static final CohortExpressionQueryBuilder queryBuilder = new CohortExpressionQueryBuilder();

// Mode id used when requesting demographic (DEMOGRAPHICS domain) statistics.
private static final int DEMOGRAPHIC_MODE = 2;
private static final String DEMOGRAPHIC_DOMAIN = "DEMOGRAPHICS";

// Parameter names stripped/filtered when rendering the results query.
private static final String[] PARAMETERS_RESULTS_FILTERED = { "cohort_characterization_generation_id",
"threshold_level", "analysis_ids", "cohort_ids", "vocabulary_schema" };

// CSV header row for prevalence-type characterization exports.
private final List<String[]> executionPrevalenceHeaderLines = new ArrayList<String[]>() {
{
add(new String[] { "Analysis ID", "Analysis name", "Strata ID", "Strata name", "Cohort ID", "Cohort name",
"Covariate ID", "Covariate name", "Covariate short name", "Count", "Percent" });
}
};

// CSV header row for distribution-type characterization exports.
private final List<String[]> executionDistributionHeaderLines = new ArrayList<String[]>() {
{
add(new String[] { "Analysis ID", "Analysis name", "Strata ID", "Strata name", "Cohort ID", "Cohort name",
"Covariate ID", "Covariate name", "Covariate short name", "Value field",
"Missing Means Zero", "Count", "Avg", "StdDev", "Min", "P10", "P25", "Median", "P75", "P90",
"Max" });
}
};

// CSV header row for target-vs-comparator characterization exports.
private final List<String[]> executionComparativeHeaderLines = new ArrayList<String[]>() {
{
add(new String[] { "Analysis ID", "Analysis name", "Strata ID", "Strata name", "Target cohort ID",
"Target cohort name", "Comparator cohort ID", "Comparator cohort name", "Covariate ID",
"Covariate name", "Covariate short name", "Target count", "Target percent",
"Comparator count", "Comparator percent", "Std. Diff Of Mean" });
}
};

// Lookup of FeatureExtraction preset analyses by name.
private Map<String, FeatureExtraction.PrespecAnalysis> prespecAnalysisMap = FeatureExtraction
.getNameToPrespecAnalysis();

// Template SQL used to fetch characterization results (loaded once from the classpath).
private final String QUERY_RESULTS = ResourceHelper
.GetResourceAsString("/resources/cohortcharacterizations/sql/queryResults.sql");
// --- Injected collaborators -------------------------------------------------

@Autowired
private CohortDefinitionRepository cohortDefinitionRepository;
@Autowired
private JobBuilderFactory jobBuilders;
@Autowired
private StepBuilderFactory stepBuilders;
@Autowired
private JobTemplate jobTemplate;
@Autowired
private CohortGenerationService cohortGenerationService;
@Autowired
private JobService jobService;
@Autowired
private CohortGenerationSensitiveInfoService sensitiveInfoService;
@Autowired
private SourceIdAccessor sourceIdAccessor;
@Autowired
ConversionService conversionService;
@Autowired
private ObjectMapper objectMapper;
@Autowired
private CohortSamplingService samplingService;
@Autowired
private ApplicationEventPublisher eventPublisher;
@Autowired
private SourceService sourceService;
@Autowired
private VocabularyService vocabularyService;
@Autowired
private PermissionService permissionService;
@PersistenceContext
protected EntityManager entityManager;
@Autowired
private CohortChecker cohortChecker;
@Autowired
private VersionService<CohortVersion> versionService;
@Autowired
private FeAnalysisEntityRepository feAnalysisRepository;
@Autowired
private SourceAwareSqlRender sourceAwareSqlRender;

// When true, newly created definitions are readable by all users by default.
@Value("${security.defaultGlobalReadPermissions}")
private boolean defaultGlobalReadPermissions;

// Renders a cohort expression to print-friendly markdown; tables extension enables GFM tables.
private final MarkdownRender markdownPF = new MarkdownRender();
private final List<Extension> extensions = Arrays.asList(TablesExtension.create());
// Maps a cohort_summary_stats row (joined with censor stats) to a report summary;
// percentMatched is final/base rendered as a percentage with two decimals.
private final RowMapper<InclusionRuleReport.Summary> summaryMapper = (rs, rowNum) -> {
InclusionRuleReport.Summary summary = new InclusionRuleReport.Summary();
summary.baseCount = rs.getLong("base_count");
summary.finalCount = rs.getLong("final_count");
summary.lostCount = rs.getLong("lost_count");
// Guard against divide-by-zero when the base cohort is empty.
double matchRatio = (summary.baseCount > 0) ? ((double) summary.finalCount / (double) summary.baseCount) : 0.0;
summary.percentMatched = new BigDecimal(matchRatio * 100.0).setScale(2, RoundingMode.HALF_UP).toPlainString() + "%";
return summary;
};

// Maps one inclusion-rule statistics row to the per-rule report entry, computing
// the percent of persons excluded by (gain_count) and satisfying (person_count)
// the rule relative to the total population.
private final RowMapper<InclusionRuleReport.InclusionRuleStatistic> inclusionRuleStatisticMapper = new RowMapper<InclusionRuleReport.InclusionRuleStatistic>() {
@Override
public InclusionRuleReport.InclusionRuleStatistic mapRow(ResultSet rs, int rowNum) throws SQLException {
InclusionRuleReport.InclusionRuleStatistic statistic = new InclusionRuleReport.InclusionRuleStatistic();
statistic.id = rs.getInt("rule_sequence");
statistic.name = rs.getString("name");
statistic.countSatisfying = rs.getLong("person_count");
long personTotal = rs.getLong("person_total");
long gainCount = rs.getLong("gain_count");
// Ratios guard against an empty population (personTotal == 0).
double excludeRatio = personTotal > 0 ? (double) gainCount / (double) personTotal : 0.0;
String percentExcluded = new BigDecimal(excludeRatio * 100.0).setScale(2, RoundingMode.HALF_UP).toPlainString();
statistic.percentExcluded = percentExcluded + "%";
long satisfyCount = rs.getLong("person_count");
double satisfyRatio = personTotal > 0 ? (double) satisfyCount / (double) personTotal : 0.0;
String percentSatisfying = new BigDecimal(satisfyRatio * 100.0).setScale(2, RoundingMode.HALF_UP).toPlainString();
statistic.percentSatisfying = percentSatisfying + "%";
return statistic;
}
};

// Maps an inclusion-rule result row to a [rule bitmask, person count] pair.
private final RowMapper<Long[]> inclusionRuleResultItemMapper = new RowMapper<Long[]>() {
@Override
public Long[] mapRow(ResultSet rs, int rowNum) throws SQLException {
Long[] resultItem = new Long[2];
resultItem[0] = rs.getLong("inclusion_rule_mask");
resultItem[1] = rs.getLong("person_count");
return resultItem;
}
};
/**
 * Finds the generation info entry for the given source id, or {@code null}
 * when the set contains no matching entry.
 */
private CohortGenerationInfo findBySourceId(Set<CohortGenerationInfo> infoList, Integer sourceId) {
  return infoList.stream()
      .filter(candidate -> candidate.getId().getSourceId().equals(sourceId))
      .findFirst()
      .orElse(null);
}
/**
 * Loads the pre-computed base/final/lost counts for a cohort definition from
 * the results schema. Returns an empty Summary when no row exists for the
 * given id/mode combination.
 *
 * @param id     cohort definition id (whitelisted before binding)
 * @param source source whose Results daimon tables are queried
 * @param modeId generation mode the stats were computed under
 */
private InclusionRuleReport.Summary getInclusionRuleReportSummary(int id, Source source, int modeId) {
String sql = "select cs.base_count, cs.final_count, cc.lost_count from @tableQualifier.cohort_summary_stats cs left join @tableQualifier.cohort_censor_stats cc "
+ "on cc.cohort_definition_id = cs.cohort_definition_id where cs.cohort_definition_id = @id and cs.mode_id = @modeId";
String tqName = "tableQualifier";
String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
String[] varNames = {"id", "modeId"};
Object[] varValues = {whitelist(id), whitelist(modeId)};
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, sql, tqName, tqValue, varNames, varValues, SessionUtils.sessionId());
List<InclusionRuleReport.Summary> result = getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), summaryMapper);
return result.isEmpty() ? new InclusionRuleReport.Summary() : result.get(0);
}
/**
 * Loads per-inclusion-rule statistics (satisfy/gain counts and totals) for a
 * cohort definition from the results schema, ordered by rule sequence.
 *
 * @param id     cohort definition id (whitelisted before binding)
 * @param source source whose Results daimon tables are queried
 * @param modeId generation mode the stats were computed under
 */
private List<InclusionRuleReport.InclusionRuleStatistic> getInclusionRuleStatistics(int id, Source source, int modeId) {
String sql = "select i.rule_sequence, i.name, s.person_count, s.gain_count, s.person_total"
+ " from @tableQualifier.cohort_inclusion i join @tableQualifier.cohort_inclusion_stats s on i.cohort_definition_id = s.cohort_definition_id"
+ " and i.rule_sequence = s.rule_sequence"
+ " where i.cohort_definition_id = @id and mode_id = @modeId ORDER BY i.rule_sequence";
String tqName = "tableQualifier";
String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
String[] varNames = {"id", "modeId"};
Object[] varValues = {whitelist(id), whitelist(modeId)};
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, sql, tqName, tqValue, varNames, varValues, SessionUtils.sessionId());
return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), inclusionRuleStatisticMapper);
}
/**
 * Builds demographic reports (gender/age/race/... preset feature analyses) for
 * a cohort from the results of a prior characterization generation.
 *
 * @param id           cohort definition id
 * @param source       source the characterization ran against
 * @param modeId       generation mode (currently unused here; kept for signature stability)
 * @param ccGenerateId characterization generation id whose results are read
 */
private List<Report> getDemographicStatistics(int id, Source source,
        int modeId, long ccGenerateId) {
    ExecutionResultRequest params = new ExecutionResultRequest();
    // Preset demographic FE analyses; ids 70/72/74/77 -- presumably gender, age,
    // race and one more demographic preset; TODO confirm against the FE presets.
    Set<FeAnalysisEntity> featureAnalyses = feAnalysisRepository.findByListIds(Arrays.asList(70, 72, 74, 77));
    params.setCohortIds(Arrays.asList(id));
    params.setAnalysisIds(featureAnalyses.stream().map(this::mapFeatureAnalysisId).collect(Collectors.toList()));
    params.setDomainIds(Arrays.asList(DEMOGRAPHIC_DOMAIN));
    List<CcResult> ccResults = findResults(ccGenerateId, params, source);

    Map<Integer, AnalysisItem> analysisMap = new HashMap<>();
    // Single explicit pass replaces the previous Stream.peek()+forEach pipeline:
    // peek is documented for debugging and its side effects may be elided, so the
    // id-resolution step now runs inline before the result is indexed.
    for (CcResult ccResult : ccResults) {
        if (StandardFeatureAnalysisType.PRESET.toString().equals(ccResult.getFaType())) {
            // Preset results carry the design name; resolve it back to the FE analysis id.
            featureAnalyses.stream().filter(fa -> Objects.equals(fa.getDesign(), ccResult.getAnalysisName())).findFirst()
                    .ifPresent(v -> ccResult.setAnalysisId(v.getId()));
        }
        if (ccResult instanceof CcPrevalenceStat) {
            // Index prevalence stats by analysis id, then by covariate/strata.
            AnalysisItem analysisItem = analysisMap.computeIfAbsent(ccResult.getAnalysisId(), k -> new AnalysisItem());
            analysisItem.setType(ccResult.getResultType());
            analysisItem.setName(ccResult.getAnalysisName());
            analysisItem.setFaType(ccResult.getFaType());
            List<CcResult> results = analysisItem.getOrCreateCovariateItem(
                    ((CcPrevalenceStat) ccResult).getCovariateId(), ccResult.getStrataId());
            results.add(ccResult);
        }
    }

    CohortDefinition cohortDefinition = cohortDefinitionRepository.findOne(id);
    return prepareReportData(analysisMap,
            new HashSet<CohortDefinition>(Arrays.asList(cohortDefinition)), featureAnalyses);
}
/**
 * Builds per-analysis report objects from grouped characterization results:
 * one simple report per analysis, comparative reports when exactly two
 * cohorts are present, and prevalence-only summary reports when more than
 * one analysis exists.
 *
 * @param analysisMap characterization rows grouped by analysis id
 * @param cohortDefs the cohort definitions the results belong to
 * @param featureAnalyses the feature analyses that produced the results
 * @return the assembled simple, comparative and summary reports
 * @throws RuntimeException wrapping any failure while assembling report rows
 */
private List<Report> prepareReportData(Map<Integer, AnalysisItem> analysisMap, Set<CohortDefinition> cohortDefs,
Set<FeAnalysisEntity> featureAnalyses) {
// Create map to get cohort name by its id
final Map<Integer, CohortDefinition> definitionMap = cohortDefs.stream()
.collect(Collectors.toMap(CohortDefinition::getId, Function.identity()));
// Create map to get the feature analysis domain by its name/design key
final Map<String, String> feAnalysisMap = featureAnalyses.stream()
.collect(Collectors.toMap(this::mapFeatureName, entity -> entity.getDomain().toString()));
List<Report> reports = new ArrayList<>();
try {
// list to accumulate results from simple reports
List<AnalysisResultItem> simpleResultSummary = new ArrayList<>();
// list to accumulate results from comparative reports
List<AnalysisResultItem> comparativeResultSummary = new ArrayList<>();
// do not create summary reports when only one analysis is present
boolean ignoreSummary = analysisMap.keySet().size() == 1;
for (Integer analysisId : analysisMap.keySet()) {
// NOTE(review): analysisId is drawn from analysisMap.keySet(), so this
// putIfAbsent can never insert anything — candidate for removal.
analysisMap.putIfAbsent(analysisId, new AnalysisItem());
AnalysisItem analysisItem = analysisMap.get(analysisId);
AnalysisResultItem resultItem = analysisItem.getSimpleItems(definitionMap, feAnalysisMap);
Report simpleReport = new Report(analysisItem.getName(), analysisId, resultItem);
simpleReport.faType = analysisItem.getFaType();
simpleReport.domainId = feAnalysisMap.get(analysisItem.getName());
if (PREVALENCE.equals(analysisItem.getType())) {
simpleReport.header = executionPrevalenceHeaderLines;
simpleReport.resultType = PREVALENCE;
// Summary comparative reports are only available for
// prevalence type
simpleResultSummary.add(resultItem);
} else if (DISTRIBUTION.equals(analysisItem.getType())) {
simpleReport.header = executionDistributionHeaderLines;
simpleReport.resultType = DISTRIBUTION;
}
reports.add(simpleReport);
// comparative mode: only applies when exactly two cohorts are compared
if (definitionMap.size() == 2) {
Iterator<CohortDefinition> iter = definitionMap.values().iterator();
CohortDefinition firstCohortDef = iter.next();
CohortDefinition secondCohortDef = iter.next();
AnalysisResultItem comparativeResultItem = analysisItem.getComparativeItems(firstCohortDef,
secondCohortDef, feAnalysisMap);
Report comparativeReport = new Report(analysisItem.getName(), analysisId, comparativeResultItem);
comparativeReport.header = executionComparativeHeaderLines;
comparativeReport.isComparative = true;
comparativeReport.faType = analysisItem.getFaType();
comparativeReport.domainId = feAnalysisMap.get(analysisItem.getName());
if (PREVALENCE.equals(analysisItem.getType())) {
comparativeReport.resultType = PREVALENCE;
// Summary comparative reports are only available for
// prevalence type
comparativeResultSummary.add(comparativeResultItem);
} else if (DISTRIBUTION.equals(analysisItem.getType())) {
comparativeReport.resultType = DISTRIBUTION;
}
reports.add(comparativeReport);
}
}
if (!ignoreSummary) {
// summary comparative reports are only available for prevalence
// type
if (!simpleResultSummary.isEmpty()) {
Report simpleSummaryData = new Report("All prevalence covariates", simpleResultSummary);
simpleSummaryData.header = executionPrevalenceHeaderLines;
simpleSummaryData.isSummary = true;
simpleSummaryData.resultType = PREVALENCE;
reports.add(simpleSummaryData);
}
// comparative mode
if (!comparativeResultSummary.isEmpty()) {
Report comparativeSummaryData = new Report("All prevalence covariates", comparativeResultSummary);
comparativeSummaryData.header = executionComparativeHeaderLines;
comparativeSummaryData.isSummary = true;
comparativeSummaryData.isComparative = true;
comparativeSummaryData.resultType = PREVALENCE;
reports.add(comparativeSummaryData);
}
}
return reports;
} catch (Exception ex) {
// NOTE(review): wraps with the original as cause, so the stack trace survives.
throw new RuntimeException(ex);
}
}
/**
 * Keys a feature analysis for map lookups: preset analyses are keyed by
 * their design string, custom ones by their display name.
 */
private String mapFeatureName(FeAnalysisEntity entity) {
  return StandardFeatureAnalysisType.PRESET == entity.getType()
      ? entity.getDesign().toString()
      : entity.getName();
}
/**
 * Fetches characterization results for a generation, filtered by the request
 * parameters and mapped into CcResult rows via the generation-results mapper.
 */
private List<CcResult> findResults(final Long generationId, ExecutionResultRequest params, Source source) {
return executeFindResults(generationId, params, QUERY_RESULTS, getGenerationResults(source), source);
}
/**
 * Renders, translates and executes a parameterized characterization results
 * query against the given source.
 *
 * @param generationId the characterization generation id
 * @param params filter settings (cohort ids, analysis ids, threshold)
 * @param query the SQL resource/template to render
 * @param rowMapper maps each row into the target type
 * @param source the CDM source to run against
 * @return the mapped result rows
 */
private <T> List<T> executeFindResults(final Long generationId, ExecutionResultRequest params, String query,
                                       RowMapper<T> rowMapper, Source source) {
  final String analysisIdCsv = params.getAnalysisIds().stream().map(String::valueOf).collect(Collectors.joining(","));
  final String cohortIdCsv = params.getCohortIds().stream().map(String::valueOf).collect(Collectors.joining(","));
  // Substitute the filter parameters into the query template for this source.
  final String renderedSql = sourceAwareSqlRender.renderSql(source.getSourceId(), query,
      PARAMETERS_RESULTS_FILTERED,
      new String[] { String.valueOf(generationId), String.valueOf(params.getThresholdValuePct()), analysisIdCsv,
          cohortIdCsv, SourceUtils.getVocabularyQualifier(source) });
  // Translate to the source's SQL dialect before execution.
  final String dialectSql = SqlTranslate.translateSql(renderedSql, source.getSourceDialect(),
      SessionUtils.sessionId(), SourceUtils.getTempQualifier(source));
  return this.getSourceJdbcTemplate(source).query(dialectSql, rowMapper);
}
/**
 * Row mapper factory for characterization result rows: the "type" column
 * selects between prevalence and distribution statistics. Rows with any
 * other type marker map to null.
 */
private RowMapper<CcResult> getGenerationResults(Source source) {
  return (rs, rowNum) -> {
    final String resultType = rs.getString("type");
    if (StringUtils.equals(resultType, PREVALENCE.toString())) {
      final CcPrevalenceStat stat = new CcPrevalenceStat();
      gatherForPrevalence(stat, rs, source);
      return stat;
    }
    if (StringUtils.equals(resultType, DISTRIBUTION.toString())) {
      // Distribution rows carry the prevalence columns plus quantiles.
      final CcDistributionStat stat = new CcDistributionStat();
      gatherForPrevalence(stat, rs, source);
      gatherForDistribution(stat, rs);
      return stat;
    }
    return null;
  };
}
/**
 * Copies the prevalence-level columns of the current result-set row into the
 * given stat object and tags it with the source key and PREVALENCE type.
 *
 * @param stat the target stat object to populate
 * @param rs the result set positioned on the row to read
 * @param source supplies the source key stamped onto the stat
 * @throws SQLException if any column read fails
 */
private void gatherForPrevalence(final CcPrevalenceStat stat, final ResultSet rs, Source source)
throws SQLException {
stat.setFaType(rs.getString("fa_type"));
stat.setSourceKey(source.getSourceKey());
stat.setCohortId(rs.getInt("cohort_definition_id"));
stat.setAnalysisId(rs.getInt("analysis_id"));
stat.setAnalysisName(rs.getString("analysis_name"));
stat.setResultType(PREVALENCE);
stat.setCovariateId(rs.getLong("covariate_id"));
stat.setCovariateName(rs.getString("covariate_name"));
stat.setConceptName(rs.getString("concept_name"));
stat.setConceptId(rs.getLong("concept_id"));
stat.setAvg(rs.getDouble("avg_value"));
stat.setCount(rs.getLong("count_value"));
stat.setStrataId(rs.getLong("strata_id"));
stat.setStrataName(rs.getString("strata_name"));
}
/**
 * Copies the distribution-specific columns (quantiles, aggregate metadata)
 * of the current result-set row into the given stat object and switches its
 * result type to DISTRIBUTION (overriding the PREVALENCE type set earlier).
 *
 * @param stat the target stat object to populate
 * @param rs the result set positioned on the row to read
 * @throws SQLException if any column read fails
 */
private void gatherForDistribution(final CcDistributionStat stat, final ResultSet rs) throws SQLException {
stat.setResultType(DISTRIBUTION);
stat.setAvg(rs.getDouble("avg_value"));
stat.setStdDev(rs.getDouble("stdev_value"));
stat.setMin(rs.getDouble("min_value"));
stat.setP10(rs.getDouble("p10_value"));
stat.setP25(rs.getDouble("p25_value"));
stat.setMedian(rs.getDouble("median_value"));
stat.setP75(rs.getDouble("p75_value"));
stat.setP90(rs.getDouble("p90_value"));
stat.setMax(rs.getDouble("max_value"));
stat.setAggregateId(rs.getInt("aggregate_id"));
stat.setAggregateName(rs.getString("aggregate_name"));
// A flag column: 1 means missing values are treated as zero.
stat.setMissingMeansZero(rs.getInt("missing_means_zero") == 1);
}
/**
 * Resolves the effective analysis id for a feature analysis. Custom analyses
 * carry their own id; preset analyses are matched by design name against the
 * prespecified-analysis map.
 *
 * @throws IllegalArgumentException when a preset has no matching prespec entry
 */
private Integer mapFeatureAnalysisId(FeAnalysisEntity feAnalysis) {
  if (!feAnalysis.isPreset()) {
    return feAnalysis.getId();
  }
  return prespecAnalysisMap.values().stream()
      .filter(p -> Objects.equals(p.analysisName, feAnalysis.getDesign()))
      .findFirst()
      .orElseThrow(() -> new IllegalArgumentException(
          String.format("Preset analysis with id=%s does not exist", feAnalysis.getId())))
      .analysisId;
}
/**
 * Counts the set bits in an inclusion-rule bitmask (each set bit marks one
 * satisfied inclusion rule).
 *
 * @param n the bitmask read from cohort_inclusion_result
 * @return the population count of {@code n}
 */
private int countSetBits(long n) {
  // Long.bitCount is the intrinsic population count. Unlike the previous
  // Kernighan loop guarded by n > 0, it also counts correctly when the
  // sign bit of the mask is set (the old loop returned 0 for negative n).
  return Long.bitCount(n);
}
/**
 * Renders a bitmask as a fixed-width binary string, reversed so that the
 * lowest-order rule bit appears first.
 *
 * @param n the bitmask value
 * @param size the number of inclusion rules (target string width)
 * @return the reversed, zero-padded binary representation
 */
private String formatBitMask(Long n, int size) {
  StringBuilder bits = new StringBuilder(Long.toBinaryString(n));
  // Zero-pad on the left up to 'size' digits (no-op if already wider).
  while (bits.length() < size) {
    bits.insert(0, '0');
  }
  return bits.reverse().toString();
}
/**
 * Builds a nested treemap JSON structure describing how many persons matched
 * each combination of inclusion rules for a cohort generation mode. Bitmask
 * groups with more rules satisfied are placed higher in the hierarchy.
 *
 * @param id the cohort definition id
 * @param inclusionRuleCount the number of inclusion rules (bitmask width)
 * @param source the CDM source holding the results schema
 * @param modeId the generation mode the statistics were produced under
 * @return a JSON string rooted at an "Everyone" node
 */
private String getInclusionRuleTreemapData(int id, int inclusionRuleCount, Source source, int modeId) {
  String sql = "select inclusion_rule_mask, person_count from @tableQualifier.cohort_inclusion_result where cohort_definition_id = @id and mode_id = @modeId";
  String tqName = "tableQualifier";
  String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
  String[] varNames = {"id", "modeId"};
  Object[] varValues = {whitelist(id), whitelist(modeId)};
  PreparedStatementRenderer psr = new PreparedStatementRenderer(source, sql, tqName, tqValue, varNames, varValues, SessionUtils.sessionId());
  // [0] is the inclusion rule bitmask, [1] is the count of the match
  List<Long[]> items = this.getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), inclusionRuleResultItemMapper);
  // Bucket the rows by how many rules each bitmask satisfies
  // (computeIfAbsent replaces the old containsKey/put pair).
  Map<Integer, List<Long[]>> groups = new HashMap<>();
  for (Long[] item : items) {
    groups.computeIfAbsent(countSetBits(item[0]), k -> new ArrayList<>()).add(item);
  }
  StringBuilder treemapData = new StringBuilder("{\"name\" : \"Everyone\", \"children\" : [");
  // Sort descending in one pass instead of sort-then-reverse: more matches
  // (more bits set) appear higher in the hierarchy.
  List<Integer> groupKeys = new ArrayList<>(groups.keySet());
  groupKeys.sort(Collections.reverseOrder());
  int groupCount = 0;
  for (Integer groupKey : groupKeys) {
    if (groupCount > 0) {
      treemapData.append(",");
    }
    treemapData.append(String.format("{\"name\" : \"Group %d\", \"children\" : [", groupKey));
    int groupItemCount = 0;
    for (Long[] groupItem : groups.get(groupKey)) {
      if (groupItemCount > 0) {
        treemapData.append(",");
      }
      treemapData.append(String.format("{\"name\": \"%s\", \"size\": %d}", formatBitMask(groupItem[0], inclusionRuleCount), groupItem[1]));
      groupItemCount++;
    }
    groupCount++;
  }
  // One "]}" closes each group node plus the outer "Everyone" node.
  treemapData.append(StringUtils.repeat("]}", groupCount + 1));
  return treemapData.toString();
}
/**
 * Request payload for the SQL-generation endpoint: a cohort expression plus
 * optional build options (defaults are applied by the endpoint when null).
 */
public static class GenerateSqlRequest {
public GenerateSqlRequest() {
}
// The cohort definition expression to render into templated SQL.
@JsonProperty("expression")
public CohortExpression expression;
// Optional query-build options; may be null.
@JsonProperty("options")
public CohortExpressionQueryBuilder.BuildExpressionQueryOptions options;
}
// Injected servlet context (not referenced by the methods visible in this
// chunk — presumably used elsewhere in the class; verify before removing).
@Context
ServletContext context;
/**
 * Returns OHDSI template SQL for a given cohort definition expression.
 *
 * @summary Generate Sql
 * @param request A GenerateSqlRequest containing the cohort expression and options.
 * @return The OHDSI template SQL needed to generate the input cohort definition as a character string
 */
@Path("sql")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public GenerateSqlResult generateSql(GenerateSqlRequest request) {
  // Fall back to default build options when none were supplied.
  CohortExpressionQueryBuilder.BuildExpressionQueryOptions options =
      request.options != null
          ? request.options
          : new CohortExpressionQueryBuilder.BuildExpressionQueryOptions();
  GenerateSqlResult result = new GenerateSqlResult();
  String expressionSql = queryBuilder.buildExpressionQuery(request.expression, options);
  result.templateSql = SqlRender.renderSql(expressionSql, null, null);
  return result;
}
/**
* Returns metadata about all cohort definitions in the WebAPI database
*
* @summary List Cohort Definitions
* @return List of metadata about all cohort definitions in WebAPI
* @see org.ohdsi.webapi.cohortdefinition.CohortMetadataDTO
*/
@GET
@Path("/")
@Produces(MediaType.APPLICATION_JSON)
@Transactional
@Cacheable(cacheNames = CachingSetup.COHORT_DEFINITION_LIST_CACHE, key = "@permissionService.getSubjectCacheKey()")
public List<CohortMetadataDTO> getCohortDefinitionList() {
List<CohortDefinition> definitions = cohortDefinitionRepository.list();
return definitions.stream()
.filter(!defaultGlobalReadPermissions ? entity -> permissionService.hasReadAccess(entity) : entity -> true)
.map(def -> {
CohortMetadataDTO dto = conversionService.convert(def, CohortMetadataImplDTO.class);
permissionService.fillWriteAccess(def, dto);
permissionService.fillReadAccess(def, dto);
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | true |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/TherapyPathResultsService.java | src/main/java/org/ohdsi/webapi/service/TherapyPathResultsService.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.service;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.apache.commons.lang3.math.NumberUtils;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.therapy.TherapyPathReport;
import org.ohdsi.webapi.therapy.TherapyPathVector;
import org.ohdsi.webapi.therapy.TherapySummary;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
/**
* REST Services related to an initial implementation of treatment
* pathways but should not be used as it is old and should be deprecated
* in a future release of the code base.
*
* @summary Treatment pathways results (DO NOT USE)
*/
@Path("{sourceKey}/txpathresults/")
@Component
public class TherapyPathResultsService extends AbstractDaoService {
/**
* Get reports
*
* @summary DO NOT USE
* @deprecated
* @param sourceKey The source key
* @return The report info
*/
@Path("reports")
@GET
@Produces(MediaType.APPLICATION_JSON)
public List<TherapyPathReport> getReports(@PathParam("sourceKey") String sourceKey) {
try {
Source source = getSourceRepository().findBySourceKey(sourceKey);
PreparedStatementRenderer psr = prepareGetReports(source);
return getSourceJdbcTemplate(source).query(psr.getSql(),psr.getSetter(), new RowMapper<TherapyPathReport>() {
@Override
public TherapyPathReport mapRow(final ResultSet rs, final int arg1) throws SQLException {
final TherapyPathReport report = new TherapyPathReport();
report.reportId = rs.getInt(1);
report.reportCaption = rs.getString(2);
report.year = Integer.toString(rs.getInt(3));
if (report.year.equals("9999")) {
report.year = "All Years";
}
report.disease = rs.getString(4);
report.datasource = rs.getString(5);
return report;
}
});
} catch (Exception exception) {
throw new RuntimeException("Error getting therapy path reports" + exception.getMessage());
}
}
protected PreparedStatementRenderer prepareGetReports(Source source) {
String resourcePath="/resources/therapypathresults/sql/getTherapyPathReports.sql";
String tqName="OHDSI_schema";
String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
return new PreparedStatementRenderer(source, resourcePath, tqName, tqValue);
}
/**
* Get the report by ID
*
* @summary DO NOT USE
* @deprecated
* @param id The report ID
* @param sourceKey The source key
* @return Therapy path vector
*/
@Path("report/{id}")
@GET
@Produces(MediaType.APPLICATION_JSON)
public List<TherapyPathVector> getTherapyPathVectors(@PathParam("id") String id, @PathParam("sourceKey") String sourceKey) {
try {
Source source = getSourceRepository().findBySourceKey(sourceKey);
PreparedStatementRenderer psr = prepareGetTherapyVectors(id, source);
return getSourceJdbcTemplate(source).query(psr.getSql(),psr.getSetter(), new RowMapper<TherapyPathVector>() {
@Override
public TherapyPathVector mapRow(final ResultSet rs, final int arg1) throws SQLException {
final TherapyPathVector vector = new TherapyPathVector();
vector.key = rs.getString("ResultKey");
vector.count = rs.getInt("ResultCount");
return vector;
}
});
} catch (Exception exception) {
throw new RuntimeException("Error getting therapy path vectors - " + exception.getMessage());
}
}
protected PreparedStatementRenderer prepareGetTherapyVectors(String id, Source source) {
String sqlPath="/resources/therapypathresults/sql/getTherapyPathVectors.sql";
return new PreparedStatementRenderer(source, sqlPath, null, (String) null, "id", Integer.valueOf(id));
}
/**
* Get the report summary
*
* @summary DO NOT USE
* @deprecated
* @param sourceKey The source key
* @param identifiers The report identifiers
* @return List<TherapySummary>
*/
@Path("summary")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public List<TherapySummary> getSummaries(@PathParam("sourceKey") String sourceKey, String[] identifiers) {
try {
Source source = getSourceRepository().findBySourceKey(sourceKey);
PreparedStatementRenderer psr = prepareGetSummaries(identifiers, source);
return getSourceJdbcTemplate(source).query(psr.getSql(),psr.getSetter(), new RowMapper<TherapySummary>() {
@Override
public TherapySummary mapRow(final ResultSet rs, final int arg1) throws SQLException {
final TherapySummary summary = new TherapySummary();
summary.key = rs.getString(1);
summary.name = rs.getString(2);
summary.tx1 = rs.getInt(3);
summary.tx2 = rs.getInt(4);
summary.tx3 = rs.getInt(5);
summary.total = rs.getInt(6);
return summary;
}
});
} catch (Exception exception) {
throw new RuntimeException("Error getting therapy path summary - " + exception.getMessage());
}
}
protected PreparedStatementRenderer prepareGetSummaries(String[] identifiers, Source source) {
String sqlPath = "/resources/therapypathresults/sql/getTherapySummaries.sql";
List<Integer> values = Arrays.stream(identifiers).map(NumberUtils::toInt).collect(Collectors.toList());
return new PreparedStatementRenderer(source, sqlPath, null, (String) null, "identifiersList", values.toArray());
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/VocabularyService.java | src/main/java/org/ohdsi/webapi/service/VocabularyService.java | package org.ohdsi.webapi.service;
import static org.ohdsi.webapi.service.cscompare.ConceptSetCompareService.CONCEPT_SET_COMPARISON_ROW_MAPPER;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.cache.CacheManager;
import javax.cache.configuration.MutableConfiguration;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.ForbiddenException;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.ohdsi.analysis.Utils;
import org.ohdsi.circe.cohortdefinition.ConceptSet;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.circe.vocabulary.ConceptSetExpression;
import org.ohdsi.circe.vocabulary.ConceptSetExpressionQueryBuilder;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.vocabulary.Concept;
import org.ohdsi.vocabulary.SearchProviderConfig;
import org.ohdsi.webapi.activity.Activity.ActivityType;
import org.ohdsi.webapi.activity.Tracker;
import org.ohdsi.webapi.conceptset.ConceptSetComparison;
import org.ohdsi.webapi.conceptset.ConceptSetExport;
import org.ohdsi.webapi.conceptset.ConceptSetOptimizationResult;
import org.ohdsi.webapi.service.cscompare.CompareArbitraryDto;
import org.ohdsi.webapi.service.cscompare.ConceptSetCompareService;
import org.ohdsi.webapi.service.cscompare.ExpressionFileUtils;
import org.ohdsi.webapi.service.vocabulary.ConceptSetStrategy;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.source.SourceInfo;
import org.ohdsi.webapi.util.CacheHelper;
import org.ohdsi.webapi.util.PreparedSqlRender;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.ohdsi.webapi.vocabulary.ConceptRecommendedNotInstalledException;
import org.ohdsi.webapi.vocabulary.ConceptRelationship;
import org.ohdsi.webapi.vocabulary.ConceptSearch;
import org.ohdsi.webapi.vocabulary.ConceptSetCondenser;
import org.ohdsi.webapi.vocabulary.DescendentOfAncestorSearch;
import org.ohdsi.webapi.vocabulary.Domain;
import org.ohdsi.webapi.vocabulary.RecommendedConcept;
import org.ohdsi.webapi.vocabulary.RelatedConcept;
import org.ohdsi.webapi.vocabulary.RelatedConceptSearch;
import org.ohdsi.webapi.vocabulary.Vocabulary;
import org.ohdsi.webapi.vocabulary.VocabularyInfo;
import org.ohdsi.webapi.vocabulary.VocabularySearchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.cache.JCacheManagerCustomizer;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.Caching;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.RowCallbackHandler;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
import org.ohdsi.webapi.vocabulary.MappedRelatedConcept;
/**
* Provides REST services for working with
* the OMOP standardized vocabularies
*
* @summary Vocabulary
*/
@Path("vocabulary/")
@Component
public class VocabularyService extends AbstractDaoService {
//create cache
/**
 * Registers the JCache caches used by the vocabulary service for concept
 * detail, related-concept, and hierarchy lookups.
 */
@Component
public static class CachingSetup implements JCacheManagerCustomizer {
public static final String CONCEPT_DETAIL_CACHE = "conceptDetail";
public static final String CONCEPT_RELATED_CACHE = "conceptRelated";
public static final String CONCEPT_HIERARCHY_CACHE = "conceptHierarchy";
@Override
public void customize(CacheManager cacheManager) {
// due to unit tests causing application contexts to reload cache manager caches, we
// have to check for the existence of a cache before creating it
Set<String> cacheNames = CacheHelper.getCacheNames(cacheManager);
// Vocabulary lookup caches, keyed by string (source + concept id) with
// store-by-reference and statistics enabled.
if (!cacheNames.contains(CONCEPT_DETAIL_CACHE)) {
cacheManager.createCache(CONCEPT_DETAIL_CACHE, new MutableConfiguration<String, Concept>()
.setTypes(String.class, Concept.class)
.setStoreByValue(false)
.setStatisticsEnabled(true));
}
if (!cacheNames.contains(CONCEPT_RELATED_CACHE)) {
cacheManager.createCache(CONCEPT_RELATED_CACHE, new MutableConfiguration<String, Collection<RelatedConcept>>()
.setTypes(String.class, (Class<Collection<RelatedConcept>>) (Class<?>) Collection.class)
.setStoreByValue(false)
.setStatisticsEnabled(true))
;
}
if (!cacheNames.contains(CONCEPT_HIERARCHY_CACHE)) {
cacheManager.createCache(CONCEPT_HIERARCHY_CACHE, new MutableConfiguration<String, Collection<RelatedConcept>>()
.setTypes(String.class, (Class<Collection<RelatedConcept>>) (Class<?>) Collection.class)
.setStoreByValue(false)
.setStatisticsEnabled(true));
}
}
}
// Lazily initialized cache of vocabulary version info keyed by source key
// (population happens outside this chunk — verify before relying on it).
private static Hashtable<String, VocabularyInfo> vocabularyInfoCache = null;
// Default row limit for vocabulary searches (presumably used as a
// @DefaultValue on search endpoints — confirm against the search methods).
public static final String DEFAULT_SEARCH_ROWS = "20000";
@Autowired
private SourceService sourceService; // resolves configured CDM/vocabulary sources
@Autowired
private VocabularySearchService vocabSearchService; // pluggable concept search providers
@Autowired
protected GenericConversionService conversionService; // entity <-> DTO conversion
@Autowired
private ConceptSetCompareService conceptSetCompareService;
@Autowired
private ObjectMapper objectMapper;
// JDBC driver class of the WebAPI application database (from configuration).
@Value("${datasource.driverClassName}")
private String driver;
// Shared row mapper that materializes a vocabulary CONCEPT row into a
// Concept object; reused by all identifier-lookup queries in this service.
private final RowMapper<Concept> rowMapper = (resultSet, arg1) -> {
final Concept concept = new Concept();
concept.conceptId = resultSet.getLong("CONCEPT_ID");
concept.conceptCode = resultSet.getString("CONCEPT_CODE");
concept.conceptName = resultSet.getString("CONCEPT_NAME");
concept.standardConcept = resultSet.getString("STANDARD_CONCEPT");
concept.invalidReason = resultSet.getString("INVALID_REASON");
concept.conceptClassId = resultSet.getString("CONCEPT_CLASS_ID");
concept.vocabularyId = resultSet.getString("VOCABULARY_ID");
concept.domainId = resultSet.getString("DOMAIN_ID");
concept.validStartDate = resultSet.getDate("VALID_START_DATE");
concept.validEndDate = resultSet.getDate("VALID_END_DATE");
return concept;
};
/** @return the shared row mapper that materializes concept rows. */
public RowMapper<Concept> getRowMapper() {
  return rowMapper;
}
/**
 * Resolves the source key of the highest-priority vocabulary source, or
 * null when no vocabulary source is configured.
 */
private String getDefaultVocabularySourceKey() {
  Source vocabSource = sourceService.getPriorityVocabularySource();
  if (vocabSource == null) {
    return null;
  }
  return vocabSource.getSourceKey();
}
/**
 * Returns the highest-priority vocabulary source.
 *
 * @throws ForbiddenException when no vocabulary source is configured
 */
public Source getPriorityVocabularySource() {
  Source source = sourceService.getPriorityVocabularySource();
  if (source == null) {
    throw new ForbiddenException();
  }
  return source;
}
/**
 * Exports a concept set together with its resolved (included) concepts and
 * the source codes that map to them, using the given vocabulary source.
 */
public ConceptSetExport exportConceptSet(ConceptSet conceptSet, SourceInfo vocabSource) {
  final ConceptSetExport export = conversionService.convert(conceptSet, ConceptSetExport.class);
  final String sourceKey = vocabSource.sourceKey;
  // Resolve the expression to its included concept identifiers.
  export.identifierConcepts = executeIncludedConceptLookup(sourceKey, conceptSet.expression);
  // Resolve the source codes mapped into those concepts.
  export.mappedConcepts = executeMappedLookup(sourceKey, conceptSet.expression);
  return export;
}
/**
 * Calculates the full set of ancestor and descendant concepts for a list of
 * ancestor and descendant concepts specified. This is used by ATLAS when
 * navigating the list of included concepts in a concept set - the full list
 * of ancestors (as defined in the concept set) and the descendants (those
 * concepts included when resolving the concept set) are used to determine
 * which descendant concepts share one or more ancestors.
 *
 * @summary Calculates ancestors for a list of concepts
 * @param ids Concepts identifiers from concept set
 * @return A map of the form: {id -> List<ascendant id>}
 */
@Path("{sourceKey}/lookup/identifiers/ancestors")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Map<Long, List<Long>> calculateAscendants(@PathParam("sourceKey") String sourceKey, Ids ids) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
if (CollectionUtils.isEmpty(ids.ancestors) || CollectionUtils.isEmpty(ids.descendants)) {
return new HashMap<>();
}
// Each query binds both id arrays, so each side gets half of the source's
// bind-parameter budget.
final int limit = Math.floorDiv(PreparedSqlRender.getParameterLimit(source), 2);
final List<Map.Entry<Long, Long>> result = new ArrayList<>();
// Here we calculate cartesian product of batches
for (final List<Long> ancestorsBatch : Lists.partition(ids.ancestors, limit)) {
for (final List<Long> descendantsBatch : Lists.partition(ids.descendants, limit)) {
final PreparedStatementRenderer psr = prepareAscendantsCalculating(
ancestorsBatch.toArray(new Long[0]),
descendantsBatch.toArray(new Long[0]),
source
);
result.addAll(getSourceJdbcTemplate(source)
.query(
psr.getSql(),
psr.getSetter(),
(resultSet, arg1) -> Maps.immutableEntry(resultSet.getLong("ANCESTOR_ID"), resultSet.getLong("DESCENDANT_ID"))));
}
}
// Invert the (ancestor, descendant) pairs into descendant -> [ancestors].
return result
.stream()
.collect(
Collectors.groupingBy(
Map.Entry::getValue,
Collectors.mapping(
Map.Entry::getKey,
Collectors.toList()
)
)
);
}
/** JSON payload for ancestor calculation: ancestor and descendant concept ids. */
private static class Ids {
public List<Long> ancestors;
public List<Long> descendants;
}
/**
 * Renders the ancestor/descendant intersection query for one batch pair
 * against the source's vocabulary schema.
 */
protected PreparedStatementRenderer prepareAscendantsCalculating(Long[] identifiers, Long[] descendants, Source source) {
  final String sqlPath = "/resources/vocabulary/sql/calculateAscendants.sql";
  final String schemaVar = "CDM_schema";
  final String schema = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
  String[] names = { "ancestors", "descendants" };
  Object[] values = { identifiers, descendants };
  return new PreparedStatementRenderer(source, sqlPath, schemaVar, schema, names, values);
}
/**
 * Get concepts from concept identifiers (IDs) from a specific source
 *
 * @summary Perform a lookup of an array of concept identifiers returning the
 * matching concepts with their detailed properties.
 * @param sourceKey path parameter specifying the source key identifying the
 * source to use for access to the set of vocabulary tables
 * @param identifiers an array of concept identifiers
 * @return A collection of concepts
 */
@Path("{sourceKey}/lookup/identifiers")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<Concept> executeIdentifierLookup(@PathParam("sourceKey") String sourceKey, long[] identifiers) {
  // Resolve the source once, then delegate to the chunk-aware overload.
  return executeIdentifierLookup(getSourceRepository().findBySourceKey(sourceKey), identifiers);
}
/**
 * Looks up concept details for a set of concept ids, transparently splitting
 * the request into chunks that respect the source RDBMS bind-parameter limit.
 *
 * @param source the vocabulary source to query
 * @param identifiers the concept ids to resolve
 * @return the matching concepts (empty when no ids are supplied)
 */
protected Collection<Concept> executeIdentifierLookup(Source source, long[] identifiers) {
  Collection<Concept> concepts = new ArrayList<>();
  if (identifiers.length == 0) {
    return concepts;
  }
  // Determine if we need to chunk up the request based on the parameter
  // limit of the source RDBMS: the tail is resolved recursively, the head
  // is handled by the query below.
  int parameterLimit = PreparedSqlRender.getParameterLimit(source);
  if (parameterLimit > 0 && identifiers.length > parameterLimit) {
    concepts.addAll(executeIdentifierLookup(source, Arrays.copyOfRange(identifiers, parameterLimit, identifiers.length)));
    identifiers = Arrays.copyOfRange(identifiers, 0, parameterLimit);
  }
  PreparedStatementRenderer psr = prepareExecuteIdentifierLookup(identifiers, source);
  // Accumulate unconditionally. The previous code keyed the return value on
  // Collection.addAll()'s boolean: when the final chunk matched no rows it
  // returned a fresh empty list, silently discarding concepts found by
  // earlier (recursive) chunks.
  concepts.addAll(getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), this.rowMapper));
  return concepts;
}
/**
 * Builds the identifier-lookup query renderer for the given source,
 * binding the concept id array as the {@code identifiers} parameter.
 */
protected PreparedStatementRenderer prepareExecuteIdentifierLookup(long[] identifiers, Source source) {
  final String vocabSchema = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
  return new PreparedStatementRenderer(
      source,
      "/resources/vocabulary/sql/lookupIdentifiers.sql",
      "CDM_schema",
      vocabSchema,
      "identifiers",
      identifiers);
}
/**
 * Get concepts from concept identifiers (IDs) from the default vocabulary
 * source.
 *
 * @summary Perform a lookup of an array of concept identifiers returning the
 * matching concepts with their detailed properties, using the default source.
 * @param identifiers an array of concept identifiers
 * @return A collection of concepts
 */
@Path("lookup/identifiers")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<Concept> executeIdentifierLookup(long[] identifiers) {
  final String sourceKey = getDefaultVocabularySourceKey();
  if (sourceKey == null) {
    // No vocabulary/CDM daimon configured anywhere -> HTTP 503.
    throw new WebApplicationException(
        new Exception("No vocabulary or cdm daimon was found in configured sources. Search failed."),
        Response.Status.SERVICE_UNAVAILABLE);
  }
  return executeIdentifierLookup(sourceKey, identifiers);
}
/**
 * Resolves a concept set expression to its included concepts on the given source.
 * Renders the expression to SQL first, then splices that SQL into the
 * identifier-lookup query in place of the {@code identifiers} placeholder
 * (a query-in-query substitution rather than a bound parameter list).
 *
 * @param sourceKey key of the source providing the vocabulary tables
 * @param conceptSetExpression the concept set expression to resolve
 * @return the concepts included by the expression
 */
public Collection<Concept> executeIncludedConceptLookup(String sourceKey, ConceptSetExpression conceptSetExpression) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
  ConceptSetExpressionQueryBuilder builder = new ConceptSetExpressionQueryBuilder();
  String query = builder.buildExpressionQuery(conceptSetExpression);
  // First render: resolve the schema placeholder inside the expression query.
  PreparedStatementRenderer psr = new PreparedStatementRenderer(source, query, "vocabulary_database_schema", tqValue);
  String sqlPath = "/resources/vocabulary/sql/lookupIdentifiers.sql";
  // Second render: substitute the rendered expression SQL for "identifiers"
  // inside the lookup query; no bound variables are used (nulls).
  String[] searches = new String[]{"identifiers", "CDM_schema"};
  String[] replacements = new String[]{psr.getSql(), tqValue};
  psr = new PreparedStatementRenderer(source, sqlPath, searches, replacements, (String[]) null, null);
  return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), this.rowMapper);
}
/**
 * Get concepts from source codes from a specific source.
 *
 * @summary Lookup source codes from the concept CONCEPT_CODE field
 * in the specified vocabulary
 * @param sourceKey path parameter specifying the source key identifying the
 * source to use for access to the set of vocabulary tables
 * @param sourcecodes array of source codes
 * @return A collection of concepts
 */
@Path("{sourceKey}/lookup/sourcecodes")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<Concept> executeSourcecodeLookup(@PathParam("sourceKey") String sourceKey, String[] sourcecodes) {
  // Nothing to look up -> empty result without touching the database.
  if (sourcecodes.length == 0) {
    return new ArrayList<>();
  }
  final Source vocabSource = getSourceRepository().findBySourceKey(sourceKey);
  final PreparedStatementRenderer renderer = prepareExecuteSourcecodeLookup(sourcecodes, vocabSource);
  return getSourceJdbcTemplate(vocabSource).query(renderer.getSql(), renderer.getSetter(), this.rowMapper);
}
/**
 * Builds the source-code lookup query renderer for the given source,
 * binding the code array as the {@code sourcecodes} parameter.
 */
protected PreparedStatementRenderer prepareExecuteSourcecodeLookup(String[] sourcecodes, Source source) {
  final String vocabSchema = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
  return new PreparedStatementRenderer(
      source,
      "/resources/vocabulary/sql/lookupSourcecodes.sql",
      "CDM_schema",
      vocabSchema,
      "sourcecodes",
      sourcecodes);
}
/**
 * Get concepts from source codes from the default vocabulary source.
 *
 * @summary Lookup source codes from the concept CONCEPT_CODE field
 * in the specified vocabulary
 * @param sourcecodes array of source codes
 * @return A collection of concepts
 */
@Path("lookup/sourcecodes")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<Concept> executeSourcecodeLookup(String[] sourcecodes) {
  final String sourceKey = getDefaultVocabularySourceKey();
  if (sourceKey == null) {
    // No vocabulary/CDM daimon configured anywhere -> HTTP 503.
    throw new WebApplicationException(
        new Exception("No vocabulary or cdm daimon was found in configured sources. Search failed."),
        Response.Status.SERVICE_UNAVAILABLE);
  }
  return executeSourcecodeLookup(sourceKey, sourcecodes);
}
/**
 * Get concepts mapped to the selected concept identifiers from a
 * specific source. Find all concepts mapped to the concept identifiers
 * provided. This end-point will check the CONCEPT, CONCEPT_RELATIONSHIP and
 * SOURCE_TO_CONCEPT_MAP tables.
 *
 * @summary Concepts mapped to other concepts
 * @param sourceKey path parameter specifying the source key identifying the
 * source to use for access to the set of vocabulary tables
 * @param identifiers an array of concept identifiers
 * @return A collection of concepts
 */
@Path("{sourceKey}/lookup/mapped")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<Concept> executeMappedLookup(@PathParam("sourceKey") String sourceKey, long[] identifiers) {
  // Resolve the source from its key, then delegate to the Source-based overload.
  final Source vocabSource = getSourceRepository().findBySourceKey(sourceKey);
  return executeMappedLookup(vocabSource, identifiers);
}
/**
 * Looks up concepts mapped to the given identifiers, recursively chunking the
 * request when the identifier count exceeds the source RDBMS parameter limit.
 * Results are collected into a set, so duplicates across chunks collapse.
 *
 * @param source the source whose vocabulary tables are queried
 * @param identifiers concept ids to look up; may be empty
 * @return all mapped concepts across every chunk
 */
protected Collection<Concept> executeMappedLookup(Source source, long[] identifiers) {
  Collection<Concept> concepts = new HashSet<>();
  if (identifiers.length == 0) {
    return concepts;
  }
  // Determine if we need to chunk up the request based on the parameter
  // limit of the source RDBMS.
  int parameterLimit = PreparedSqlRender.getParameterLimit(source);
  // The identifiers are used in 3 places in the query, so the effective
  // parameter limit must be divided by 3.
  parameterLimit = Math.floorDiv(parameterLimit, 3);
  if (parameterLimit > 0 && identifiers.length > parameterLimit) {
    // Recurse on the tail, then handle the first chunk below.
    concepts = executeMappedLookup(source, Arrays.copyOfRange(identifiers, parameterLimit, identifiers.length));
    identifiers = Arrays.copyOfRange(identifiers, 0, parameterLimit);
  }
  PreparedStatementRenderer psr = prepareExecuteMappedLookup(identifiers, source);
  // BUG FIX: the old code returned `concepts.addAll(...) ? concepts : new HashSet<>()`.
  // HashSet.addAll returns false when no NEW elements are added (empty result
  // OR all results already present), in which case everything accumulated by
  // the recursive chunked calls was discarded. Always return the accumulated set.
  concepts.addAll(getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), this.rowMapper));
  return concepts;
}
/**
 * Builds the mapped-concepts query renderer for the given source,
 * binding the concept id array as the {@code identifiers} parameter.
 */
protected PreparedStatementRenderer prepareExecuteMappedLookup(long[] identifiers, Source source) {
  final String vocabSchema = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
  return new PreparedStatementRenderer(
      source,
      "/resources/vocabulary/sql/getMappedSourcecodes.sql",
      "CDM_schema",
      vocabSchema,
      "identifiers",
      identifiers);
}
/**
 * Get concepts mapped to the selected concept identifiers from the default
 * vocabulary source. Find all concepts mapped to the concept identifiers
 * provided. This end-point will check the CONCEPT, CONCEPT_RELATIONSHIP and
 * SOURCE_TO_CONCEPT_MAP tables.
 *
 * @summary Concepts mapped to other concepts
 * @param identifiers an array of concept identifiers
 * @return A collection of concepts
 */
@Path("lookup/mapped")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<Concept> executeMappedLookup(long[] identifiers) {
  final String sourceKey = getDefaultVocabularySourceKey();
  if (sourceKey == null) {
    // No vocabulary/CDM daimon configured anywhere -> HTTP 503.
    throw new WebApplicationException(
        new Exception("No vocabulary or cdm daimon was found in configured sources. Search failed."),
        Response.Status.SERVICE_UNAVAILABLE);
  }
  return executeMappedLookup(sourceKey, identifiers);
}
/**
 * Resolves a concept set expression and returns the concepts mapped to its
 * included concepts. Renders the expression to SQL first, then splices that
 * SQL into the mapped-source-codes query in place of the {@code identifiers}
 * placeholder (query-in-query substitution, no bound variables).
 *
 * @param sourceKey key of the source providing the vocabulary tables
 * @param conceptSetExpression the concept set expression to resolve
 * @return concepts mapped to the expression's included concepts
 */
public Collection<Concept> executeMappedLookup(String sourceKey, ConceptSetExpression conceptSetExpression) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  String tableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
  ConceptSetExpressionQueryBuilder builder = new ConceptSetExpressionQueryBuilder();
  String query = builder.buildExpressionQuery(conceptSetExpression);
  // First render: resolve the schema placeholder inside the expression query.
  PreparedStatementRenderer psr = new PreparedStatementRenderer(source, query, "vocabulary_database_schema", tableQualifier);
  String sqlPath = "/resources/vocabulary/sql/getMappedSourcecodes.sql";
  String[] search = new String[]{"identifiers", "CDM_schema"};
  String[] replace = new String[]{psr.getSql(), tableQualifier};
  psr = new PreparedStatementRenderer(source, sqlPath, search, replace, (String[]) null, null);
  // CONSISTENCY FIX: pass the renderer's parameter setter, matching the
  // sibling executeIncludedConceptLookup; previously the setter was dropped,
  // so any parameters the renderer collected would never have been bound.
  return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), this.rowMapper);
}
/**
 * Search for a concept on the selected source.
 *
 * @summary Search for a concept on the selected source
 * @param sourceKey The source key for the concept search
 * @param search The ConceptSearch parameters
 * @return A collection of concepts
 */
@Path("{sourceKey}/search")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<Concept> executeSearch(@PathParam("sourceKey") String sourceKey, ConceptSearch search) {
  // Resolve the source, render the search SQL, then run it mapped to Concepts.
  final Source vocabSource = getSourceRepository().findBySourceKey(sourceKey);
  final PreparedStatementRenderer renderer = prepareExecuteSearch(search, vocabSource);
  return getSourceJdbcTemplate(vocabSource).query(renderer.getSql(), renderer.getSetter(), rowMapper);
}
/**
 * Builds the concept-search SQL (lexical or plain) for the given source,
 * assembling filter clauses and bound variables from the search criteria.
 * <p>
 * NOTE: mutates {@code search.query} in place to escape '[' for SQL LIKE
 * bracket syntax.
 *
 * @param search search criteria (query text, domain/vocabulary/class filters, flags)
 * @param source the source whose vocabulary schema is queried
 * @return a renderer holding the parameterized SQL and its ordered parameters
 */
protected PreparedStatementRenderer prepareExecuteSearch(ConceptSearch search, Source source) {
  // escape '[' so it is treated literally inside SQL LIKE patterns
  search.query = search.query.replace("[", "[[]");
  String resourcePath = search.isLexical ? "/resources/vocabulary/sql/searchLexical.sql" : "/resources/vocabulary/sql/search.sql";
  String searchSql = ResourceHelper.GetResourceAsString(resourcePath);
  String tqName = "CDM_schema";
  String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
  // Template-level search/replace pairs vs. bound variable name/value pairs.
  List<String> searchNamesList = new ArrayList<>();
  List<Object> replacementNamesList = new ArrayList<>();
  List<String> variableNameList = new ArrayList<>();
  List<Object> variableValueList = new ArrayList<>();
  searchNamesList.add(tqName);
  replacementNamesList.add(tqValue);
  String filters = "";
  if (search.domainId != null && search.domainId.length > 0) {
    // lexical search domain filters work slightly differently than non-lexical
    if (!search.isLexical) {
      // use domain_ids as-is
      filters += " AND DOMAIN_ID IN (@domainId)";
      variableNameList.add("domainId");
      variableValueList.add(search.domainId);
    } else {
      // MEASUREMENT domain is a special case where we want to ensure concept class is 'lab test' or 'procedure'
      ArrayList<String> domainClauses = new ArrayList<>();
      String[] nonMeasurementDomains = Stream.of(search.domainId).filter(s -> !"Measurement".equals(s)).collect(Collectors.toList()).toArray(new String[0]);
      if (nonMeasurementDomains.length > 0) {
        domainClauses.add("DOMAIN_ID IN (@domainId)");
        variableNameList.add("domainId");
        variableValueList.add(nonMeasurementDomains);
      }
      if (Arrays.asList(search.domainId).contains("Measurement")) {
        domainClauses.add("(DOMAIN_ID = 'Measurement' and LOWER(concept_class_id) in ('lab test', 'procedure'))");
      }
      if (!domainClauses.isEmpty()) {
        filters += String.format(" AND (%s)", StringUtils.join(domainClauses, " OR "));
      }
    }
  }
  if (search.vocabularyId != null && search.vocabularyId.length > 0) {
    filters += " AND VOCABULARY_ID IN (@vocabularyId)";
    variableNameList.add("vocabularyId");
    variableValueList.add(search.vocabularyId);
  }
  if (search.conceptClassId != null && search.conceptClassId.length > 0) {
    filters += " AND CONCEPT_CLASS_ID IN (@conceptClassId)";
    variableNameList.add("conceptClassId");
    variableValueList.add(search.conceptClassId);
  }
  if (search.invalidReason != null && !search.invalidReason.trim().isEmpty()) {
    // "V" means valid, which is stored as NULL in INVALID_REASON.
    if (search.invalidReason.equals("V")) {
      filters += " AND INVALID_REASON IS NULL ";
    } else {
      filters += " AND INVALID_REASON = @invalidReason";
      variableNameList.add("invalidReason");
      variableValueList.add(search.invalidReason.trim());
    }
  }
  if (search.standardConcept != null) {
    // "N" means non-standard, which is stored as NULL in STANDARD_CONCEPT.
    if (search.standardConcept.equals("N")) {
      filters += " AND STANDARD_CONCEPT IS NULL ";
    } else {
      filters += " AND STANDARD_CONCEPT = @standardConcept";
      variableNameList.add("standardConcept");
      variableValueList.add(search.standardConcept.trim());
    }
  }
  if (search.isLexical) {
    // 1. Create term variables for the expressions including truncated terms (terms >=8 are truncated to 6 letters)
    List<String> searchTerms = Arrays.asList(StringUtils.split(search.query.toLowerCase(), " "));
    List<String> allTerms = Stream.concat(
        searchTerms.stream(),
        searchTerms.stream().filter(i -> i.length() >= 8).map(i -> StringUtils.left(i,6))
    ).sorted((a,b) -> b.length() - a.length()).collect(Collectors.toList());
    LinkedHashMap<String, String> termMap = new LinkedHashMap<>();
    for (int i=0;i<allTerms.size();i++) {
      termMap.put(String.format("term_%d",i+1), allTerms.get(i));
    }
    // 2. Create REPLACE expressions to calculate the match ratio
    String replaceExpression = termMap.keySet().stream()
        .reduce("", (acc, element) -> {
          return "".equals(acc) ?
              String.format("REPLACE(lower(concept_name), '@%s','')",element) // the first iteration
              : String.format("REPLACE(%s, '@%s','')", acc, element); // the subsequent iterations
        });
    searchNamesList.add("replace_expression");
    replacementNamesList.add(replaceExpression);
    // 3. Create the set of 'like' expressions for concept name from the terms that are < 8 chars
    List<String> nameFilterList = termMap.keySet().stream()
        .filter(k -> termMap.get(k).length() < 8)
        .map(k -> String.format("lower(concept_name) like '%%@%s%%'",k))
        .collect(Collectors.toList());
    searchNamesList.add("name_filters");
    replacementNamesList.add(StringUtils.join(nameFilterList, " AND "));
    // 4. Create the set of 'like' expressions for concept synonyms
    List<String> synonymFilterList = termMap.keySet().stream()
        .filter(k -> termMap.get(k).length() < 8)
        .map(k -> String.format("lower(concept_synonym_name) like '%%@%s%%'",k))
        .collect(Collectors.toList());
    searchNamesList.add("synonym_filters");
    replacementNamesList.add(StringUtils.join(synonymFilterList, " AND "));
    // 5. Create name-value pairs for each term parameter
    for (Map.Entry<String,String> entry : termMap.entrySet()) {
      variableNameList.add(entry.getKey());
      variableValueList.add(entry.getValue());
    }
  } else {
    if (!search.query.isEmpty()) {
      String queryFilter = "LOWER(CONCEPT_NAME) LIKE '%@query%' or LOWER(CONCEPT_CODE) LIKE '%@query%'";
      if (StringUtils.isNumeric(search.query)) {
        queryFilter += " or CONCEPT_ID = CAST(@query as int)";
      }
      filters += " AND (" + queryFilter + ")";
      variableNameList.add("query");
      variableValueList.add(search.query.toLowerCase());
    }
  }
  searchSql = StringUtils.replace(searchSql, "@filters", filters);
  String[] searchNames = searchNamesList.toArray(new String[0]);
  String[] replacementNames = replacementNamesList.toArray(new String[0]);
  String[] variableNames = variableNameList.toArray(new String[variableNameList.size()]);
  Object[] variableValues = variableValueList.toArray(new Object[variableValueList.size()]);
  // DEAD CODE REMOVED: an unused local previously captured
  // renderer.generateDebugSql(...); its result was never read.
  return new PreparedStatementRenderer(source, searchSql, searchNames, replacementNames, variableNames, variableValues);
}
/**
 * Search for a concept on the default vocabulary source.
 *
 * @summary Search for a concept (default vocabulary source)
 * @param search The ConceptSearch parameters
 * @return A collection of concepts
 */
@Path("search")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<Concept> executeSearch(ConceptSearch search) {
  final String sourceKey = getDefaultVocabularySourceKey();
  if (sourceKey == null) {
    // No vocabulary/CDM daimon configured anywhere -> HTTP 503.
    throw new WebApplicationException(
        new Exception("No vocabulary or cdm daimon was found in configured sources. Search failed."),
        Response.Status.SERVICE_UNAVAILABLE);
  }
  return executeSearch(sourceKey, search);
}
/**
 * Search for a concept based on a query using the selected vocabulary source.
 *
 * @summary Search for a concept using a query
 * @param sourceKey The source key holding the OMOP vocabulary
 * @param query The query to use to search for concepts
 * @return A collection of concepts
 */
@Path("{sourceKey}/search/{query}")
@GET
@Produces(MediaType.APPLICATION_JSON)
public Collection<Concept> executeSearch(@PathParam("sourceKey") String sourceKey, @PathParam("query") String query) {
  // Delegates to the query-string variant using the default row limit.
  return this.executeSearch(sourceKey, query, DEFAULT_SEARCH_ROWS);
}
/**
 * Search for a concept based on a query using the selected vocabulary source.
 * NOTE: This method uses the query as part of the URL query string.
 *
 * @summary Search for a concept using a query
 * @param sourceKey The source key holding the OMOP vocabulary
 * @param query The query to use to search for concepts
 * @param rows The number of rows to return.
 * @return A collection of concepts; empty if the search provider fails
 * @throws NumberFormatException if {@code rows} is not a positive integer
 */
@Path("{sourceKey}/search")
@GET
@Produces(MediaType.APPLICATION_JSON)
public Collection<Concept> executeSearch(@PathParam("sourceKey") String sourceKey, @QueryParam("query") String query, @DefaultValue(DEFAULT_SEARCH_ROWS) @QueryParam("rows") String rows) {
  // Validate that rows parses as an integer > 0. parseInt already throws
  // NumberFormatException on non-numeric input; the previous
  // catch-and-rethrow of that same exception was a no-op and was removed.
  int r = Integer.parseInt(rows);
  if (r <= 0) {
    throw new NumberFormatException("The rows parameter must be greater than 0");
  }
  Collection<Concept> concepts = new ArrayList<>();
  try {
    Source source = getSourceRepository().findBySourceKey(sourceKey);
    VocabularyInfo vocabularyInfo = getInfo(sourceKey);
    // Vocabulary version is embedded in the provider config key; spaces
    // are not allowed there, so replace them with underscores.
    String versionKey = vocabularyInfo.version.replace(' ', '_');
    SearchProviderConfig searchConfig = new SearchProviderConfig(source.getSourceKey(), versionKey);
    concepts = vocabSearchService.getSearchProvider(searchConfig).executeSearch(searchConfig, query, rows);
  } catch (Exception ex) {
    // Deliberate best-effort: log and return an empty collection on failure.
    log.error("An error occurred during the vocabulary search", ex);
  }
  return concepts;
}
/**
 * Builds the search renderer for a bare query string (no filters) against
 * the given source by wrapping it in a minimal ConceptSearch.
 */
public PreparedStatementRenderer prepareExecuteSearchWithQuery(String query, Source source) {
  final ConceptSearch criteria = new ConceptSearch();
  criteria.query = query;
  return this.prepareExecuteSearch(criteria, source);
}
/**
* Search for a concept based on a query using the default vocabulary source.
* NOTE: This method uses the query as part of the URL and not the
* query string
*
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | true |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/JobService.java | src/main/java/org/ohdsi/webapi/service/JobService.java | package org.ohdsi.webapi.service;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.job.JobInstanceResource;
import org.ohdsi.webapi.job.JobTemplate;
import org.ohdsi.webapi.job.JobUtils;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.springframework.batch.admin.service.SearchableJobExecutionDao;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.launch.NoSuchJobException;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.scope.context.StepSynchronizationManager;
import org.springframework.batch.core.step.StepLocator;
import org.springframework.batch.core.step.tasklet.StoppableTasklet;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.core.step.tasklet.TaskletStep;
import org.springframework.dao.DataAccessException;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Predicate;
/**
 * REST Services related to working with the Spring Batch jobs
 *
 * @summary Jobs
 */
@Path("/job/")
@Component
public class JobService extends AbstractDaoService {
// Read-only access to Spring Batch job metadata (instances, executions).
private final JobExplorer jobExplorer;
// Execution DAO with search/paging support (spring-batch-admin).
private final SearchableJobExecutionDao jobExecutionDao;
// Used to persist status changes (e.g. STOPPING) back to the batch metadata tables.
private final JobRepository jobRepository;
// Launches jobs and converts executions to REST resources.
private final JobTemplate jobTemplate;
// Tracks running Job objects so they can be stopped later.
// NOTE(review): runJob() keys this map by execution id, but cancelJobExecution()
// looks up getRunningJob(jobExecution.getJobId()) — job id, not execution id.
// Confirm which key is intended; these differ in Spring Batch.
// NOTE(review): plain HashMap mutated from request handlers — confirm
// single-threaded access or consider a concurrent map.
private Map<Long, Job> jobMap = new HashMap<>();
public JobService(JobExplorer jobExplorer, SearchableJobExecutionDao jobExecutionDao, JobRepository jobRepository, JobTemplate jobTemplate) {
this.jobExplorer = jobExplorer;
this.jobExecutionDao = jobExecutionDao;
this.jobRepository = jobRepository;
this.jobTemplate = jobTemplate;
}
/**
 * Get the job information by job ID
 *
 * @summary Get job by ID
 * @param jobId The job ID
 * @return The job information, or null if no such job instance exists
 */
@GET
@Path("{jobId}")
@Produces(MediaType.APPLICATION_JSON)
public JobInstanceResource findJob(@PathParam("jobId") final Long jobId) {
final JobInstance job = this.jobExplorer.getJobInstance(jobId);
if (job == null) {
return null;//TODO #8 conventions under review
}
return JobUtils.toJobInstanceResource(job);
}
/**
 * Get the job execution information by job type and name
 *
 * @summary Get job by name and type
 * @param jobName The job name (matched against the JOB_NAME job parameter)
 * @param jobType The job type (the Spring Batch job name used for the lookup)
 * @return JobExecutionResource for the first matching running execution, or null
 */
@GET
@Path("/type/{jobType}/name/{jobName}")
@Produces(MediaType.APPLICATION_JSON)
public JobExecutionResource findJobByName(@PathParam("jobName") final String jobName, @PathParam("jobType") final String jobType) {
// Only running executions of the given type are searched.
final Optional<JobExecution> jobExecution = jobExplorer.findRunningJobExecutions(jobType).stream()
.filter(job -> jobName.equals(job.getJobParameters().getString(Constants.Params.JOB_NAME)))
.findFirst();
return jobExecution.isPresent() ? JobUtils.toJobExecutionResource(jobExecution.get()) : null;
}
/**
 * Get the job execution information by execution ID and job ID
 *
 * @summary Get job by job ID and execution ID
 * @param jobId The job ID
 * @param executionId The execution ID
 * @return JobExecutionResource, or null if the execution does not belong to the job
 */
@GET
@Path("{jobId}/execution/{executionId}")
@Produces(MediaType.APPLICATION_JSON)
public JobExecutionResource findJobExecution(@PathParam("jobId") final Long jobId,
@PathParam("executionId") final Long executionId) {
return service(jobId, executionId);
}
/**
 * Find job execution by execution ID
 *
 * @summary Get job by execution ID
 * @param executionId The job execution ID
 * @return JobExecutionResource, or null if no such execution exists
 */
@GET
@Path("/execution/{executionId}")
@Produces(MediaType.APPLICATION_JSON)
public JobExecutionResource findJobExecution(@PathParam("executionId") final Long executionId) {
return service(null, executionId);
}
// Shared lookup: resolves an execution and, when jobId is given, verifies
// the execution belongs to that job.
private JobExecutionResource service(final Long jobId, final Long executionId) {
final JobExecution exec = this.jobExplorer.getJobExecution(executionId);
if ((exec == null) || ((jobId != null) && !jobId.equals(exec.getJobId()))) {
return null;//TODO #8 conventions under review
}
return JobUtils.toJobExecutionResource(exec);
}
/**
 * Get job names (unique names). Note: this path (GET /job) should really
 * return pages of job instances. This could be implemented should the need
 * arise. See {@link JobService#list(String, Integer, Integer)} to obtain
 * executions and filter by job name.
 *
 * @summary Get list of jobs
 * @return A list of jobs
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
public List<String> findJobNames() {
return this.jobExplorer.getJobNames();
}
/**
 * <i>Variation of spring-batch-admin support:
 * org.springframework.batch.admin.web.BatchJobExecutionsController</i>.
 * <p>
 * Return a paged collection of job executions. Filter for a given job.
 * Returned in pages.
 *
 * @summary Get job executions with filters
 * @param jobName name of the job
 * @param pageIndex start index for the job execution list
 * @param pageSize page size for the list
 * @param comprehensivePage boolean if true returns a comprehensive resultset
 * as a page (i.e. pageRequest(0,resultset.size()))
 * @return collection of JobExecutionInfo
 * @throws NoSuchJobException
 */
@GET
@Path("/execution")
@Produces(MediaType.APPLICATION_JSON)
public Page<JobExecutionResource> list(@QueryParam("jobName") final String jobName,
@DefaultValue("0") @QueryParam("pageIndex") final Integer pageIndex,
@DefaultValue("20") @QueryParam("pageSize") final Integer pageSize,
@QueryParam("comprehensivePage") boolean comprehensivePage)
throws NoSuchJobException {
List<JobExecutionResource> resources = null;
if (comprehensivePage) {
// Comprehensive mode: run a single SQL query over the batch metadata
// tables and return everything as one page.
// NOTE(review): the jobName filter is not applied in this branch — confirm
// whether that is intentional.
String sqlPath = "/resources/job/sql/jobExecutions.sql";
String tqName = "ohdsi_schema";
String tqValue = getOhdsiSchema();
PreparedStatementRenderer psr = new PreparedStatementRenderer(null, sqlPath, tqName, tqValue);
resources = getJdbcTemplate().query(psr.getSql(), psr.getSetter(), new ResultSetExtractor<List<JobExecutionResource>>() {
@Override
public List<JobExecutionResource> extractData(ResultSet rs) throws SQLException, DataAccessException {
return JobUtils.toJobExecutionResource(rs);
}
});
return new PageImpl<>(resources, new PageRequest(0, pageSize), resources.size());
} else {
// Paged mode: delegate to the execution DAO, optionally filtered by job name.
resources = new ArrayList<>();
for (final JobExecution jobExecution : (jobName == null ? this.jobExecutionDao.getJobExecutions(pageIndex,
pageSize) : this.jobExecutionDao.getJobExecutions(jobName, pageIndex, pageSize))) {
resources.add(JobUtils.toJobExecutionResource(jobExecution));
}
return new PageImpl<>(resources, new PageRequest(pageIndex, pageSize),
this.jobExecutionDao.countJobExecutions());
}
}
/**
 * Requests a stop for a running execution: asks every running StoppableTasklet
 * step to stop, then marks the execution STOPPING in the job repository.
 */
public void stopJob(JobExecution jobExecution, Job job) {
if (Objects.nonNull(job)) {
jobExecution.getStepExecutions().stream()
.filter(step -> step.getStatus().isRunning())
.forEach(stepExec -> {
Step step = ((StepLocator) job).getStep(stepExec.getStepName());
if (step instanceof TaskletStep) {
Tasklet tasklet = ((TaskletStep) step).getTasklet();
if (tasklet instanceof StoppableTasklet) {
// Register the step context so the tasklet's stop() sees it,
// and release it again afterwards.
StepSynchronizationManager.register(stepExec);
((StoppableTasklet) tasklet).stop();
StepSynchronizationManager.release();
}
}
});
}
// Only executions that have not finished yet are transitioned to STOPPING.
if (jobExecution.getEndTime() == null) {
jobExecution.setStatus(BatchStatus.STOPPING);
jobRepository.update(jobExecution);
}
}
/** Returns the execution with the given id, or null if none exists. */
public JobExecution getJobExecution(Long jobExecutionId) {
return jobExplorer.getJobExecution(jobExecutionId);
}
/** Returns the tracked running Job for the given key, or null if not tracked. */
public Job getRunningJob(Long jobExecutionId) {
return jobMap.get(jobExecutionId);
}
/** Removes the tracked Job for the given key. */
public void removeJob(Long jobExecutionId) {
jobMap.remove(jobExecutionId);
}
/** Launches the job and tracks it by its new execution id so it can be stopped. */
public JobExecutionResource runJob(Job job, JobParameters jobParameters) {
JobExecutionResource jobExecution = this.jobTemplate.launch(job, jobParameters);
jobMap.put(jobExecution.getExecutionId(), job);
return jobExecution;
}
/**
 * Stops the first running execution matching the predicate, if any.
 * See the NOTE on {@code jobMap} about the key used for the lookup here.
 */
@Transactional
public void cancelJobExecution(Predicate<? super JobExecution> filterPredicate) {
jobExecutionDao.getRunningJobExecutions().stream()
.filter(filterPredicate)
.findFirst()
.ifPresent(jobExecution -> {
Job job = getRunningJob(jobExecution.getJobId());
if (Objects.nonNull(job)) {
stopJob(jobExecution, job);
}
});
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/HttpClient.java | src/main/java/org/ohdsi/webapi/service/HttpClient.java | package org.ohdsi.webapi.service;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import javax.annotation.PostConstruct;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.WebTarget;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.springframework.stereotype.Component;
@Component
public class HttpClient {
// Shared JAX-RS client, created once at startup by init().
private Client client;
@PostConstruct
private void init() throws KeyManagementException, NoSuchAlgorithmException {
this.client = getClient();
}
/**
 * Builds a JAX-RS client with multipart support.
 * <p>
 * SECURITY NOTE(review): the trust manager below accepts every certificate —
 * checkClientTrusted/checkServerTrusted are empty — which disables TLS server
 * certificate validation for all requests made through this client and leaves
 * connections open to man-in-the-middle attacks. Presumably intended for
 * self-signed internal endpoints; confirm and restrict if possible.
 */
private Client getClient() throws NoSuchAlgorithmException, KeyManagementException {
TrustManager[] trustAllCerts = new TrustManager[]{
new X509TrustManager() {
@Override
public java.security.cert.X509Certificate[] getAcceptedIssuers() {
return null;
}
@Override
public void checkClientTrusted(
java.security.cert.X509Certificate[] certs, String authType) {
}
@Override
public void checkServerTrusted(
java.security.cert.X509Certificate[] certs, String authType) {
}
}
};
// NOTE(review): "SSL" is the legacy protocol name; "TLS" is the modern
// choice — confirm before changing, as this affects negotiated protocols.
SSLContext sslContext = SSLContext.getInstance("SSL");
sslContext.init(null, trustAllCerts, null);
return ClientBuilder.newBuilder()
.sslContext(sslContext)
.register(MultiPartFeature.class)
.build();
}
/** Returns a WebTarget for the given URL using the shared client. */
public WebTarget target(final String executionEngineURL) {
return client.target(executionEngineURL);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/FeatureExtractionService.java | src/main/java/org/ohdsi/webapi/service/FeatureExtractionService.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.ohdsi.webapi.service;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import org.springframework.stereotype.Component;
import org.ohdsi.featureExtraction.FeatureExtraction;
/**
 * REST service exposing default FeatureExtraction covariate settings.
 *
 * @author asena5
 * @author alondhe2
 */
@Path("/featureextraction/")
@Component
public class FeatureExtractionService extends AbstractDaoService {
  /**
   * Get default feature extraction settings.
   *
   * @param temporal Use temporal covariate settings? true or false (default);
   *                 any value other than (case-insensitive) "true" — including
   *                 null or empty — selects the non-temporal settings
   * @return JSON with default covariate settings object
   */
  @GET
  @Path("defaultcovariatesettings")
  @Produces(MediaType.APPLICATION_JSON)
  public String getDefaultCovariateSettings(@QueryParam("temporal") final String temporal) {
    // Boolean.parseBoolean never throws and maps null/empty/non-"true" to
    // false, so the previous try/catch (which rethrew IllegalArgumentException)
    // was unreachable dead code and has been removed. Behavior is unchanged.
    boolean getTemporal = Boolean.parseBoolean(temporal);
    FeatureExtraction.init(null);
    return getTemporal
        ? FeatureExtraction.getDefaultPrespecTemporalAnalyses()
        : FeatureExtraction.getDefaultPrespecAnalyses();
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/ActivityService.java | src/main/java/org/ohdsi/webapi/service/ActivityService.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.service;
import java.util.ArrayList;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.ohdsi.webapi.activity.Tracker;
import org.springframework.stereotype.Component;
/**
 * Example REST service - will be deprecated
* in a future release
*
* @deprecated
* @summary Activity
*/
@Path("/activity/")
@Component
public class ActivityService {

    /**
     * Returns the most recent tracked activity entries.
     *
     * <p>Example REST service kept for illustration only; it will be removed
     * in a future release.
     *
     * @deprecated do not use
     * @summary DO NOT USE
     */
    @GET
    @Path("latest")
    @Produces(MediaType.APPLICATION_JSON)
    public Object[] getLatestActivity() {
        final Object[] latestActivity = Tracker.getActivity();
        return latestActivity;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/IRAnalysisService.java | src/main/java/org/ohdsi/webapi/service/IRAnalysisService.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.service;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraph;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraphUtils;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import com.opencsv.CSVWriter;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.IterableUtils;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.analysis.Utils;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.check.CheckResult;
import org.ohdsi.webapi.check.checker.ir.IRChecker;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetails;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.cohortdefinition.dto.CohortDTO;
import org.ohdsi.webapi.common.DesignImportService;
import org.ohdsi.webapi.common.generation.GenerateSqlResult;
import org.ohdsi.webapi.common.generation.GenerationUtils;
import org.ohdsi.webapi.ircalc.AnalysisReport;
import org.ohdsi.webapi.ircalc.ExecutionInfo;
import org.ohdsi.webapi.ircalc.IRAnalysisInfoListener;
import org.ohdsi.webapi.ircalc.IRAnalysisQueryBuilder;
import org.ohdsi.webapi.ircalc.IRAnalysisTasklet;
import org.ohdsi.webapi.ircalc.IRExecutionInfoRepository;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysis;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisDetails;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisExportExpression;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisExpression;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisRepository;
import org.ohdsi.webapi.ircalc.dto.IRVersionFullDTO;
import org.ohdsi.webapi.job.GeneratesNotification;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.security.PermissionService;
import org.ohdsi.webapi.service.dto.AnalysisInfoDTO;
import org.ohdsi.webapi.service.dto.IRAnalysisDTO;
import org.ohdsi.webapi.service.dto.IRAnalysisShortDTO;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.shiro.annotations.DataSourceAccess;
import org.ohdsi.webapi.shiro.annotations.SourceKey;
import org.ohdsi.webapi.shiro.management.Security;
import org.ohdsi.webapi.shiro.management.datasource.SourceAccessor;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.tag.domain.HasTags;
import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO;
import org.ohdsi.webapi.util.ExportUtil;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.ohdsi.webapi.util.NameUtils;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.ohdsi.webapi.util.SessionUtils;
import org.ohdsi.webapi.versioning.domain.IRVersion;
import org.ohdsi.webapi.versioning.domain.Version;
import org.ohdsi.webapi.versioning.domain.VersionBase;
import org.ohdsi.webapi.versioning.domain.VersionType;
import org.ohdsi.webapi.versioning.dto.VersionDTO;
import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO;
import org.ohdsi.webapi.versioning.service.VersionService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.convert.ConversionService;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import javax.annotation.PostConstruct;
import javax.servlet.ServletContext;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.ByteArrayOutputStream;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import static org.ohdsi.webapi.Constants.GENERATE_IR_ANALYSIS;
import static org.ohdsi.webapi.Constants.Params.ANALYSIS_ID;
import static org.ohdsi.webapi.Constants.Params.JOB_NAME;
import static org.ohdsi.webapi.Constants.Params.SOURCE_ID;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
/**
*
* @author Chris Knoll <cknoll@ohdsi.org>
*/
@Component
public class IRAnalysisService extends AbstractDaoService implements
GeneratesNotification, IRAnalysisResource {
private static final Logger log = LoggerFactory.getLogger(IRAnalysisService.class);
private final static String STRATA_STATS_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/incidencerate/sql/strata_stats.sql");
private static final String NAME = "irAnalysis";
private static final String NO_INCIDENCE_RATE_ANALYSIS_MESSAGE = "There is no incidence rate analysis with id = %d.";
private static final EntityGraph ANALYSIS_WITH_EXECUTION_INFO = EntityGraphUtils.fromName("IncidenceRateAnalysis.withExecutionInfoList");
private final IRAnalysisQueryBuilder queryBuilder;
@Value("${security.defaultGlobalReadPermissions}")
private boolean defaultGlobalReadPermissions;
@Autowired
private IncidenceRateAnalysisRepository irAnalysisRepository;
@Autowired
private IRExecutionInfoRepository irExecutionInfoRepository;
@Autowired
private UserRepository userRepository;
@Autowired
private JobService jobService;
@Autowired
private Security security;
@Autowired
private SourceService sourceService;
@Autowired
private GenerationUtils generationUtils;
@Autowired
ConversionService conversionService;
@Autowired
private ObjectMapper objectMapper;
//Directly wired since IRAnalysisService is directly called by Jersey and @DataSourceAccess wouldn't work in this case
@Autowired
private SourceAccessor sourceAccessor;
@Autowired
private CohortDefinitionRepository cohortDefinitionRepository;
@Autowired
private DesignImportService designImportService;
@Context
private ServletContext context;
@Autowired
private IRChecker checker;
@Autowired
private PermissionService permissionService;
@Autowired
private VersionService<IRVersion> versionService;
/** Creates the service with an IR query builder bound to the injected JSON object mapper. */
public IRAnalysisService(final ObjectMapper objectMapper) {
    this.queryBuilder = new IRAnalysisQueryBuilder(objectMapper);
}
/**
 * Finds the execution-info entry belonging to the given source.
 *
 * @param infoList execution infos attached to an analysis
 * @param sourceId id of the source to look up
 * @return the matching {@link ExecutionInfo}, or null when none matches
 */
private ExecutionInfo findExecutionInfoBySourceId(Collection<ExecutionInfo> infoList, Integer sourceId) {
    return infoList.stream()
            .filter(executionInfo -> sourceId.equals(executionInfo.getId().getSourceId()))
            .findFirst()
            .orElse(null);
}
/** One row of the stratification report: counts for a particular inclusion-rule bitmask. */
public static class StratifyReportItem {
    // bitmask of matched inclusion rules (the strata_mask column)
    public long bits;
    // number of persons with this exact bitmask
    public long totalPersons;
    public long timeAtRisk;
    public long cases;
}
/** Request payload for {@code generateSql()}: the IR expression plus optional build options. */
public static class GenerateSqlRequest {

    public GenerateSqlRequest() {
    }

    // id of the analysis being rendered
    @JsonProperty("analysisId")
    public Integer analysisId;

    // the incidence rate analysis design to render into SQL
    @JsonProperty("expression")
    public IncidenceRateAnalysisExpression expression;

    // optional query build options; generateSql() substitutes defaults when null
    @JsonProperty("options")
    public IRAnalysisQueryBuilder.BuildExpressionQueryOptions options;
}
// Maps one aggregated ir_analysis_result row (grouped by target/outcome) to a report Summary.
private final RowMapper<AnalysisReport.Summary> summaryMapper = (rs, rowNum) -> {
    AnalysisReport.Summary summary = new AnalysisReport.Summary();
    summary.targetId = rs.getInt("target_id");
    summary.outcomeId = rs.getInt("outcome_id");
    summary.totalPersons = rs.getLong("person_count");
    summary.timeAtRisk = rs.getLong("time_at_risk");
    summary.cases = rs.getLong("cases");
    return summary;
};
/**
 * Loads the per-(target, outcome) summary rows for an analysis from the
 * source's results schema.
 *
 * @param id     analysis id
 * @param source CDM source whose Results daimon holds ir_analysis_result
 * @return one Summary per (target_id, outcome_id) pair
 */
private List<AnalysisReport.Summary> getAnalysisSummaryList(int id, Source source) {
    final String resultsQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    final String sql = "select target_id, outcome_id, sum(person_count) as person_count, sum(time_at_risk) as time_at_risk," +
            " sum(cases) as cases from @tableQualifier.ir_analysis_result where analysis_id = @id GROUP BY target_id, outcome_id";
    // parameterized render: schema name substituted, id bound via prepared-statement setter
    PreparedStatementRenderer psr =
            new PreparedStatementRenderer(source, sql, "tableQualifier", resultsQualifier, "id", whitelist(id));
    return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), summaryMapper);
}
// Maps one strata_stats.sql row (per stratification/inclusion rule) to a StrataStatistic.
private final RowMapper<AnalysisReport.StrataStatistic> strataRuleStatisticMapper = (rs, rowNum) -> {
    AnalysisReport.StrataStatistic statistic = new AnalysisReport.StrataStatistic();
    statistic.id = rs.getInt("strata_sequence");
    statistic.name = rs.getString("name");
    statistic.targetId = rs.getInt("target_id");
    statistic.outcomeId = rs.getInt("outcome_id");
    statistic.totalPersons = rs.getLong("person_count");
    statistic.timeAtRisk = rs.getLong("time_at_risk");
    statistic.cases = rs.getLong("cases");
    return statistic;
};
/**
 * Loads per-stratum (inclusion rule) statistics for an analysis from the
 * results schema using the strata_stats.sql template.
 *
 * @param id     analysis id
 * @param source CDM source to query
 * @return one StrataStatistic per strata rule / target / outcome combination
 */
private List<AnalysisReport.StrataStatistic> getStrataStatistics(int id, Source source) {
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    PreparedStatementRenderer psr = new PreparedStatementRenderer(source, STRATA_STATS_QUERY_TEMPLATE, "results_database_schema", resultsTableQualifier, "analysis_id", whitelist(id));
    return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), strataRuleStatisticMapper);
}
/**
 * Counts the number of set bits (population count) in the given mask.
 *
 * @param n inclusion-rule bitmask
 * @return number of 1-bits in {@code n}
 */
private int countSetBits(long n) {
    // Long.bitCount is the JDK popcount intrinsic and replaces the manual
    // Kernighan loop. Unlike the old loop (whose "n > 0" guard returned 0
    // for any negative value), it also counts correctly when the sign bit is set.
    return Long.bitCount(n);
}
/**
 * Renders a bitmask as a fixed-width bit string, least-significant bit first
 * (e.g. n=5, size=4 yields "1010"). Equivalent to left-padding the binary
 * string to {@code size} with '0' and then reversing it.
 */
private String formatBitMask(Long n, int size) {
    StringBuilder bits = new StringBuilder(Long.toBinaryString(n)).reverse();
    while (bits.length() < size) {
        bits.append('0');
    }
    return bits.toString();
}
// Maps one raw ir_analysis_result row (keyed by strata_mask) to a StratifyReportItem.
private final RowMapper<StratifyReportItem> stratifyResultsMapper = (rs, rowNum) -> {
    StratifyReportItem resultItem = new StratifyReportItem();
    resultItem.bits = rs.getLong("strata_mask");
    resultItem.totalPersons = rs.getLong("person_count");
    resultItem.timeAtRisk = rs.getLong("time_at_risk");
    resultItem.cases = rs.getLong("cases");
    return resultItem;
};
/**
 * Builds the d3-treemap JSON for the stratification report of one
 * (target, outcome) pair: leaves are inclusion-rule bitmasks, grouped so that
 * masks with more matched rules sit higher in the hierarchy.
 *
 * @param analysisId         analysis id
 * @param targetId           target cohort id
 * @param outcomeId          outcome cohort id
 * @param inclusionRuleCount width (in bits) used when formatting each bitmask label
 * @param source             CDM source to query
 * @return JSON string of nested {"name", "children"/"size"} nodes
 */
private String getStrataTreemapData(int analysisId, int targetId, int outcomeId, int inclusionRuleCount, Source source) {
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    String query = "select strata_mask, person_count, time_at_risk, cases from @resultsTableQualifier.ir_analysis_result where analysis_id = @analysis_id and target_id = @target_id and outcome_id = @outcome_id";
    Object[] paramValues = {analysisId, targetId, outcomeId};
    String[] params = {"analysis_id", "target_id", "outcome_id"};
    PreparedStatementRenderer psr = new PreparedStatementRenderer(source, query, "resultsTableQualifier", resultsTableQualifier, params, paramValues, SessionUtils.sessionId());
    // [0] is the inclusion rule bitmask, [1] is the count of the match
    List<StratifyReportItem> items = getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), stratifyResultsMapper);
    // bucket rows by how many inclusion rules their bitmask satisfies
    Map<Integer, List<StratifyReportItem>> groups = new HashMap<>();
    for (StratifyReportItem item : items) {
        int bitsSet = countSetBits(item.bits);
        if (!groups.containsKey(bitsSet)) {
            groups.put(bitsSet, new ArrayList<>());
        }
        groups.get(bitsSet).add(item);
    }
    StringBuilder treemapData = new StringBuilder("{\"name\" : \"Everyone\", \"children\" : [");
    List<Integer> groupKeys = new ArrayList<>(groups.keySet());
    Collections.sort(groupKeys);
    Collections.reverse(groupKeys); // descending: most rules matched first
    int groupCount = 0;
    // create a nested treemap data where more matches (more bits set in string) appear higher in the hierarchy)
    for (Integer groupKey : groupKeys) {
        if (groupCount > 0) {
            treemapData.append(",");
        }
        treemapData.append(String.format("{\"name\" : \"Group %d\", \"children\" : [", groupKey));
        int groupItemCount = 0;
        for (StratifyReportItem groupItem : groups.get(groupKey)) {
            if (groupItemCount > 0) {
                treemapData.append(",");
            }
            // leaf node: bitmask label plus person/case/time-at-risk counts
            treemapData.append(String.format("{\"name\": \"%s\", \"size\": %d, \"cases\": %d, \"timeAtRisk\": %d }", formatBitMask(groupItem.bits, inclusionRuleCount), groupItem.totalPersons, groupItem.cases, groupItem.timeAtRisk));
            groupItemCount++;
        }
        groupCount++;
    }
    // close every group's "]}"; the +1 also closes the root "Everyone" node
    treemapData.append(StringUtils.repeat("]}", groupCount + 1));
    return treemapData.toString();
}
/**
 * Lists all incidence rate analyses the current user may read, as short DTOs
 * with read/write access flags filled in.
 */
@Override
public List<IRAnalysisShortDTO> getIRAnalysisList() {
    return getTransactionTemplate().execute(transactionStatus -> {
        Iterable<IncidenceRateAnalysis> analyses = this.irAnalysisRepository.findAll();
        List<IRAnalysisShortDTO> result = new ArrayList<>();
        for (IncidenceRateAnalysis analysis : analyses) {
            // when global read is not the default, only include analyses this user can read
            if (!defaultGlobalReadPermissions && !permissionService.hasReadAccess(analysis)) {
                continue;
            }
            IRAnalysisShortDTO dto = conversionService.convert(analysis, IRAnalysisShortDTO.class);
            permissionService.fillWriteAccess(analysis, dto);
            permissionService.fillReadAccess(analysis, dto);
            result.add(dto);
        }
        return result;
    });
}
/**
 * Counts analyses that share the given name — presumably excluding the analysis
 * with this id (delegates to the repository query; verify its definition).
 *
 * @param id   analysis id passed through to the repository query
 * @param name name to match
 * @return number of matching analyses
 */
@Override
@Transactional
public int getCountIRWithSameName(final int id, String name) {
    return irAnalysisRepository.getCountIRWithSameName(id, name);
}
/**
 * Creates and persists a new incidence rate analysis from the given DTO,
 * stamping the current user and time as creation metadata.
 *
 * @param analysis DTO carrying name, description and (optionally) expression JSON
 * @return the persisted analysis converted back to a DTO (with its generated id)
 */
@Override
@Transactional
public IRAnalysisDTO createAnalysis(IRAnalysisDTO analysis) {
    Date currentTime = Calendar.getInstance().getTime();
    UserEntity user = userRepository.findByLogin(security.getSubject());
    // it might be possible to leverage saveAnalysis() but not sure how to pull the auto ID from
    // the DB to pass it into saveAnalysis (since saveAnalysis does a findOne() at the start).
    // If there's a way to get the Entity into the persistence manager so findOne() returns this newly created entity
    // then we could create the entity here (without persist) and then call saveAnalysis within the same Tx.
    IncidenceRateAnalysis newAnalysis = new IncidenceRateAnalysis();
    newAnalysis.setName(StringUtils.trim(analysis.getName()))
            .setDescription(analysis.getDescription());
    newAnalysis.setCreatedBy(user);
    newAnalysis.setCreatedDate(currentTime);
    if (analysis.getExpression() != null) {
        // the expression JSON is stored on a separate details child entity
        IncidenceRateAnalysisDetails details = new IncidenceRateAnalysisDetails(newAnalysis);
        newAnalysis.setDetails(details);
        details.setExpression(analysis.getExpression());
    }
    else {
        newAnalysis.setDetails(null);
    }
    IncidenceRateAnalysis createdAnalysis = this.irAnalysisRepository.save(newAnalysis);
    return conversionService.convert(createdAnalysis, IRAnalysisDTO.class);
}
/**
 * Loads a single incidence rate analysis as a full DTO.
 *
 * @param id analysis id
 * @return the analysis DTO
 */
@Override
@Transactional
public IRAnalysisDTO getAnalysis(final int id) {
    // NOTE(review): also wraps in transactionTemplate although the method is
    // already @Transactional — likely redundant; confirm before simplifying.
    return getTransactionTemplate().execute(transactionStatus -> {
        IncidenceRateAnalysis a = this.irAnalysisRepository.findOne(id);
        // 404 when the id does not exist
        ExceptionUtils.throwNotFoundExceptionIfNull(a, String.format(NO_INCIDENCE_RATE_ANALYSIS_MESSAGE, id));
        return conversionService.convert(a, IRAnalysisDTO.class);
    });
}
/**
 * Imports an exported analysis design: converts the embedded cohort definitions
 * into stored cohort ids, de-duplicates the analysis name, and persists a new
 * analysis via {@link #createAnalysis}.
 *
 * @param dto exported design; its expression embeds full cohort definitions
 * @return the newly created analysis DTO
 */
@Override
public IRAnalysisDTO doImport(final IRAnalysisDTO dto) {
    // tags are not carried over on import
    dto.setTags(null);
    if (dto.getExpression() != null) {
        try {
            IncidenceRateAnalysisExportExpression expression = objectMapper.readValue(
                    dto.getExpression(), IncidenceRateAnalysisExportExpression.class);
            // Create lists of ids from list of cohort definitions because we do not store
            // cohort definitions in expression now
            fillCohortIds(expression.targetIds, expression.targetCohorts);
            fillCohortIds(expression.outcomeIds, expression.outcomeCohorts);
            String strExpression = objectMapper.writeValueAsString(new IncidenceRateAnalysisExpression(expression));
            dto.setExpression(strExpression);
        } catch (Exception e) {
            log.error("Error converting expression to object", e);
            throw new InternalServerErrorException();
        }
    }
    // derive a unique name when one with this name already exists
    dto.setName(NameUtils.getNameWithSuffix(dto.getName(), this::getNamesLike));
    return createAnalysis(dto);
}
/**
 * Exports an analysis design: inlines the full cohort definitions (looked up
 * by their ids) into the expression and strips create/update audit info.
 *
 * @param id analysis id
 * @return DTO whose expression embeds the referenced cohort definitions
 */
@Override
@Transactional
public IRAnalysisDTO export(final Integer id) {
    IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(id);
    ExceptionUtils.throwNotFoundExceptionIfNull(analysis, String.format(NO_INCIDENCE_RATE_ANALYSIS_MESSAGE, id));
    try {
        IncidenceRateAnalysisExportExpression expression = objectMapper.readValue(
                analysis.getDetails().getExpression(), IncidenceRateAnalysisExportExpression.class);
        // Cohorts are not stored in expression now - create lists of cohorts from
        // lists of their ids
        fillCohorts(expression.outcomeIds, expression.outcomeCohorts);
        fillCohorts(expression.targetIds, expression.targetCohorts);
        expression.outcomeCohorts.forEach(ExportUtil::clearCreateAndUpdateInfo);
        expression.targetCohorts.forEach(ExportUtil::clearCreateAndUpdateInfo);
        String strExpression = objectMapper.writeValueAsString(expression);
        // NOTE(review): this mutates the managed entity's expression inside a
        // @Transactional method; the inlined-cohort version may be flushed back
        // to the database on commit — confirm this is intended.
        analysis.getDetails().setExpression(strExpression);
    } catch (Exception e) {
        log.error("Error converting expression to object", e);
        throw new InternalServerErrorException();
    }
    IRAnalysisDTO irAnalysisDTO = conversionService.convert(analysis, IRAnalysisDTO.class);
    ExportUtil.clearCreateAndUpdateInfo(irAnalysisDTO);
    return irAnalysisDTO;
}
/**
 * Updates an existing incidence rate analysis: snapshots the current design as
 * a version, then overwrites name, description, expression and modification
 * metadata.
 *
 * @param id       id of the analysis to update
 * @param analysis DTO with the new name/description/expression (a null
 *                 expression clears the details)
 * @return the updated analysis reloaded via {@link #getAnalysis(int)}
 */
@Override
@Transactional
public IRAnalysisDTO saveAnalysis(final int id, IRAnalysisDTO analysis) {
    Date currentTime = Calendar.getInstance().getTime();
    // snapshot the current design before overwriting it
    saveVersion(id);
    UserEntity user = userRepository.findByLogin(security.getSubject());
    IncidenceRateAnalysis updatedAnalysis = this.irAnalysisRepository.findOne(id);
    // Fail fast with 404 instead of an NPE below — consistent with getAnalysis()/export().
    ExceptionUtils.throwNotFoundExceptionIfNull(updatedAnalysis, String.format(NO_INCIDENCE_RATE_ANALYSIS_MESSAGE, id));
    updatedAnalysis.setName(StringUtils.trim(analysis.getName()))
            .setDescription(analysis.getDescription());
    updatedAnalysis.setModifiedBy(user);
    updatedAnalysis.setModifiedDate(currentTime);
    if (analysis.getExpression() != null) {
        IncidenceRateAnalysisDetails details = updatedAnalysis.getDetails();
        if (details == null) {
            details = new IncidenceRateAnalysisDetails(updatedAnalysis);
            updatedAnalysis.setDetails(details);
        }
        details.setExpression(analysis.getExpression());
    } else {
        updatedAnalysis.setDetails(null);
    }
    this.irAnalysisRepository.save(updatedAnalysis);
    return getAnalysis(id);
}
/**
 * Kicks off generation of an IR analysis on one source as a Spring Batch job.
 *
 * <p>Flow: validate the design, resolve and authorize the source, mark (or
 * create) the per-source execution info as PENDING in its own committed
 * transaction, then build and submit the generation job.
 *
 * @param analysisId analysis to generate
 * @param sourceKey  key of the CDM source to run against
 * @return the submitted job's resource, or null when another process already
 *         started generation on this source
 */
@Override
@DataSourceAccess
public JobExecutionResource performAnalysis(final int analysisId, final @SourceKey String sourceKey) {
    IRAnalysisDTO irAnalysisDTO = getAnalysis(analysisId);
    CheckResult checkResult = runDiagnostics(irAnalysisDTO);
    if (checkResult.hasCriticalErrors()) {
        throw new RuntimeException("Cannot be generated due to critical errors in design. Call 'check' service for further details");
    }
    Date startTime = Calendar.getInstance().getTime();
    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    ExceptionUtils.throwNotFoundExceptionIfNull(source, String.format("There is no source with sourceKey = %s", sourceKey));
    sourceAccessor.checkAccess(source);
    // The PENDING status must be visible to other processes before the job runs,
    // so it is written in a REQUIRES_NEW transaction committed immediately below.
    DefaultTransactionDefinition requresNewTx = new DefaultTransactionDefinition();
    requresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager().getTransaction(requresNewTx);
    IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOneWithExecutionsOnExistingSources(analysisId, ANALYSIS_WITH_EXECUTION_INFO);
    ExecutionInfo analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(), source.getSourceId());
    if (analysisInfo != null) {
        if (analysisInfo.getStatus() != GenerationStatus.COMPLETE)
            return null; // Exit execution, another process has started it.
    }
    else {
        // first generation on this source: attach a new execution info record
        analysisInfo = new ExecutionInfo(analysis, source);
        analysis.getExecutionInfoList().add(analysisInfo);
    }
    analysisInfo.setStatus(GenerationStatus.PENDING)
            .setStartTime(startTime)
            .setExecutionDuration(null);
    this.irAnalysisRepository.save(analysis);
    this.getTransactionTemplate().getTransactionManager().commit(initStatus);
    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString(JOB_NAME, String.format("IR Analysis: %d: %s (%s)", analysis.getId(), source.getSourceName(), source.getSourceKey()));
    builder.addString(ANALYSIS_ID, String.valueOf(analysisId));
    builder.addString(SOURCE_ID, String.valueOf(source.getSourceId()));
    // the tasklet job resolves all target/outcome cohort definitions referenced by the expression
    SimpleJobBuilder generateIrJob = generationUtils.buildJobForCohortBasedAnalysisTasklet(
            GENERATE_IR_ANALYSIS,
            source,
            builder,
            getSourceJdbcTemplate(source),
            chunkContext -> {
                Integer irId = Integer.valueOf(chunkContext.getStepContext().getJobParameters().get(ANALYSIS_ID).toString());
                IncidenceRateAnalysis ir = this.irAnalysisRepository.findOne(irId);
                IncidenceRateAnalysisExpression expression = Utils.deserialize(ir.getDetails().getExpression(), IncidenceRateAnalysisExpression.class);
                return Stream.concat(
                        expression.targetIds.stream(),
                        expression.outcomeIds.stream()
                ).map(id -> cohortDefinitionRepository.findOneWithDetail(id))
                        .collect(Collectors.toList());
            },
            new IRAnalysisTasklet(getSourceJdbcTemplate(source), getTransactionTemplate(), irAnalysisRepository, sourceService, queryBuilder, objectMapper)
    );
    // listener updates the execution info status when the job finishes
    generateIrJob.listener(new IRAnalysisInfoListener(getTransactionTemplate(), irAnalysisRepository));
    final JobParameters jobParameters = builder.toJobParameters();
    return jobService.runJob(generateIrJob.build(), jobParameters);
}
/**
 * Cancels the running generation job for the given analysis on the given source.
 *
 * @param analysisId analysis whose generation should be cancelled
 * @param sourceKey  key of the source the generation runs against
 */
@Override
public void cancelAnalysis(int analysisId, String sourceKey) {
    Source source = getSourceRepository().findBySourceKey(sourceKey);
    // Guard against an unknown source key: source.getSourceId() below would
    // otherwise NPE. Message matches performAnalysis() for consistency.
    ExceptionUtils.throwNotFoundExceptionIfNull(source, String.format("There is no source with sourceKey = %s", sourceKey));
    jobService.cancelJobExecution(j -> {
        JobParameters jobParameters = j.getJobParameters();
        String jobName = j.getJobInstance().getJobName();
        // match on analysis id, source id, and the IR job name
        return Objects.equals(jobParameters.getString(ANALYSIS_ID), String.valueOf(analysisId))
                && Objects.equals(jobParameters.getString(SOURCE_ID), String.valueOf(source.getSourceId()))
                && Objects.equals(NAME, jobName);
    });
}
/**
 * Returns the execution info of an analysis across all sources, one DTO per
 * recorded execution.
 *
 * @param id analysis id
 * @return DTOs wrapping each {@link ExecutionInfo}
 */
@Override
@Transactional(readOnly = true)
public List<AnalysisInfoDTO> getAnalysisInfo(final int id) {
    final List<ExecutionInfo> executions = irExecutionInfoRepository.findByAnalysisId(id);
    final List<AnalysisInfoDTO> result = new ArrayList<>(executions.size());
    for (ExecutionInfo executionInfo : executions) {
        AnalysisInfoDTO dto = new AnalysisInfoDTO();
        dto.setExecutionInfo(executionInfo);
        result.add(dto);
    }
    return result;
}
/**
 * Returns execution info for one analysis on one source; when the recorded
 * execution completed successfully and is valid, the per-(target, outcome)
 * summary list is attached as well.
 *
 * @param id        analysis id
 * @param sourceKey key of the source to report on
 * @return DTO with the matching execution info (null if none) and optional summaries
 */
@Override
@DataSourceAccess
@Transactional(readOnly = true)
public AnalysisInfoDTO getAnalysisInfo(int id, @SourceKey String sourceKey) {
    Source source = sourceService.findBySourceKey(sourceKey);
    ExceptionUtils.throwNotFoundExceptionIfNull(source, String.format("There is no source with sourceKey = %s", sourceKey));
    sourceAccessor.checkAccess(source);
    AnalysisInfoDTO info = new AnalysisInfoDTO();
    List<ExecutionInfo> executionInfoList = irExecutionInfoRepository.findByAnalysisId(id);
    // pick the execution belonging to this source, if any
    info.setExecutionInfo(executionInfoList.stream().filter(i -> Objects.equals(i.getSource(), source))
            .findFirst().orElse(null));
    try {
        // only attach summaries for a valid, completed generation
        if (Objects.nonNull(info.getExecutionInfo()) && Objects.equals(info.getExecutionInfo().getStatus(), GenerationStatus.COMPLETE)
                && info.getExecutionInfo().getIsValid()) {
            info.setSummaryList(getAnalysisSummaryList(id, source));
        }
    } catch (Exception e) {
        log.error("Error getting IR Analysis summary list", e);
        throw new InternalServerErrorException();
    }
    return info;
}
/**
 * Builds the full report for one (target, outcome) pair: the overall summary,
 * the per-stratum statistics, and the treemap JSON describing inclusion-rule overlap.
 *
 * @param id        analysis id
 * @param sourceKey source to read results from
 * @param targetId  target cohort id
 * @param outcomeId outcome cohort id
 * @return the assembled {@link AnalysisReport}
 */
@Override
@Transactional
public AnalysisReport getAnalysisReport(final int id, final String sourceKey, final int targetId, final int outcomeId ) {
    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    // summary row for exactly this target/outcome pair (null if absent)
    AnalysisReport.Summary summary = IterableUtils.find(getAnalysisSummaryList(id, source), summary12 -> ((summary12.targetId == targetId) && (summary12.outcomeId == outcomeId)));
    Collection<AnalysisReport.StrataStatistic> strataStats = CollectionUtils.select(getStrataStatistics(id, source),
            summary1 -> ((summary1.targetId == targetId) && (summary1.outcomeId == outcomeId)));
    // treemap width is the number of strata rules for this pair
    String treemapData = getStrataTreemapData(id, targetId, outcomeId, strataStats.size(), source);
    AnalysisReport report = new AnalysisReport();
    report.summary = summary;
    report.stratifyStats = new ArrayList<>(strataStats);
    report.treemapData = treemapData;
    return report;
}
/**
 * Renders the analysis expression into template SQL (no dialect translation,
 * no parameter substitution beyond the render pass).
 *
 * @param request expression, analysis id and optional build options
 * @return result holding the rendered template SQL
 */
@Override
public GenerateSqlResult generateSql(GenerateSqlRequest request) {
    // fall back to default build options when the caller supplied none
    final IRAnalysisQueryBuilder.BuildExpressionQueryOptions options =
            (request.options != null) ? request.options : new IRAnalysisQueryBuilder.BuildExpressionQueryOptions();
    final String expressionSql = queryBuilder.buildAnalysisQuery(request.expression, request.analysisId, options);
    GenerateSqlResult result = new GenerateSqlResult();
    result.templateSql = SqlRender.renderSql(expressionSql, null, null);
    return result;
}
/** Runs the IR design checker over the DTO and wraps its findings in a CheckResult. */
@Override
public CheckResult runDiagnostics(IRAnalysisDTO irAnalysisDTO){
    return new CheckResult(checker.check(irAnalysisDTO));
}
/**
 * Creates a copy of an analysis: id and tags are cleared so a fresh entity is
 * persisted, and the name is derived for the copy via getNameForCopy().
 *
 * @param id analysis to copy
 * @return the newly persisted copy as a DTO
 */
@Override
@Transactional
public IRAnalysisDTO copy(final int id) {
    IRAnalysisDTO analysis = getAnalysis(id);
    analysis.setTags(null);
    analysis.setId(null); // clear the ID
    analysis.setName(getNameForCopy(analysis.getName()));
    return createAnalysis(analysis);
}
@Override
@Transactional
public Response export(final int id) {
Response response = null;
Map<String, String> fileList = new HashMap<>();
Map<Integer, String> distTypeLookup = ImmutableMap.of(1, "TAR", 2, "TTO");
try {
IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(id);
Set<ExecutionInfo> executions = analysis.getExecutionInfoList();
fileList.put("analysisDefinition.json", analysis.getDetails().getExpression());
// sequentially return results of IR calculation. In Spring 1.4.2, we can utilize @Async operations to do this in parallel.
// store results in single CSV file
ArrayList<String[]> summaryLines = new ArrayList<>();
ArrayList<String[]> strataLines = new ArrayList<>();
ArrayList<String[]> distLines = new ArrayList<>();
executions = executions.stream().filter(e -> this.isSourceAvailable(e.getSource())).collect(Collectors.toSet());
for (ExecutionInfo execution : executions)
{
Source source = execution.getSource();
String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
// get the summary data
List<AnalysisReport.Summary> summaryList = getAnalysisSummaryList(id, source);
if (summaryLines.isEmpty())
{
summaryLines.add("db_id#targetId#outcomeId#total#timeAtRisk#cases".split("#"));
}
for (AnalysisReport.Summary summary : summaryList)
{
summaryLines.add(new String[] {source.getSourceKey(),String.valueOf(summary.targetId), String.valueOf(summary.outcomeId), String.valueOf(summary.totalPersons), String.valueOf(summary.timeAtRisk), String.valueOf(summary.cases)});
}
// get the strata results
List<AnalysisReport.StrataStatistic> strataList = getStrataStatistics(id, source);
if (strataLines.isEmpty())
{
strataLines.add("db_id#targetId#outcomeId#strata_id#strata_name#total#timeAtRisk#cases".split("#"));
}
for (AnalysisReport.StrataStatistic strata : strataList)
{
strataLines.add(new String[] {source.getSourceKey(),String.valueOf(strata.targetId), String.valueOf(strata.outcomeId),String.valueOf(strata.id), String.valueOf(strata.name), String.valueOf(strata.totalPersons), String.valueOf(strata.timeAtRisk), String.valueOf(strata.cases)});
}
// get the distribution data
String distQuery = String.format("select '%s' as db_id, target_id, outcome_id, strata_sequence, dist_type, total, avg_value, std_dev, min_value, p10_value, p25_value, median_value, p75_value, p90_value, max_value from %s.ir_analysis_dist where analysis_id = %d", source.getSourceKey(), resultsTableQualifier, id);
String translatedSql = SqlTranslate.translateSql(distQuery, source.getSourceDialect(), SessionUtils.sessionId(), resultsTableQualifier);
this.getSourceJdbcTemplate(source).query(translatedSql, resultSet -> {
if (distLines.isEmpty()) {
ArrayList<String> columnNames = new ArrayList<>();
for(int i = 1; i <= resultSet.getMetaData().getColumnCount(); i++) {
columnNames.add(resultSet.getMetaData().getColumnName(i));
}
distLines.add(columnNames.toArray(new String[0]));
}
ArrayList<String> columnValues = new ArrayList<>();
for(int i = 1; i <= resultSet.getMetaData().getColumnCount(); i++) {
switch (resultSet.getMetaData().getColumnName(i)) {
case "dist_type":
columnValues.add(distTypeLookup.get(resultSet.getInt(i)));
break;
default:
columnValues.add(resultSet.getString(i));
break;
}
}
distLines.add(columnValues.toArray(new String[0]));
});
}
// Write report lines to CSV
StringWriter sw = null;
CSVWriter csvWriter = null;
sw = new StringWriter();
csvWriter = new CSVWriter(sw);
csvWriter.writeAll(summaryLines);
csvWriter.flush();
fileList.put("ir_summary.csv", sw.getBuffer().toString());
sw = new StringWriter();
csvWriter = new CSVWriter(sw);
csvWriter.writeAll(strataLines);
csvWriter.flush();
fileList.put("ir_strata.csv", sw.getBuffer().toString());
sw = new StringWriter();
csvWriter = new CSVWriter(sw);
csvWriter.writeAll(distLines);
csvWriter.flush();
fileList.put("ir_dist.csv", sw.getBuffer().toString());
// build zip output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(baos);
for(String fileName : fileList.keySet())
{
ZipEntry resultsEntry = new ZipEntry(fileName);
zos.putNextEntry(resultsEntry);
zos.write(fileList.get(fileName).getBytes());
}
zos.closeEntry();
zos.close();
baos.flush();
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | true |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/DDLService.java | src/main/java/org/ohdsi/webapi/service/DDLService.java | /*
*
* Copyright 2017 Observational Health Data Sciences and Informatics
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Authors: Maria Pozhidaeva
*
*/
package org.ohdsi.webapi.service;
import static org.ohdsi.webapi.service.SqlRenderService.translateSQL;
import com.odysseusinc.arachne.commons.types.DBMSType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import org.apache.commons.lang3.ObjectUtils;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.webapi.sqlrender.SourceStatement;
import org.ohdsi.webapi.sqlrender.TranslatedStatement;
import org.ohdsi.webapi.util.SessionUtils;
import org.springframework.stereotype.Component;
@Path("/ddl/")
@Component
public class DDLService {
    // Keys used as placeholders inside the bundled SQL resources; they are replaced
    // with the caller-supplied schema names when the SQL is rendered/translated.
    public static final String VOCAB_SCHEMA = "vocab_schema";
    public static final String RESULTS_SCHEMA = "results_schema";
    public static final String CEM_SCHEMA = "cem_results_schema";
    public static final String TEMP_SCHEMA = "oracle_temp_schema";
    // Table-creation scripts for the results schema, grouped by feature area.
    private static final Collection<String> RESULT_DDL_FILE_PATHS = Arrays.asList(
            // cohort generation results
            "/ddl/results/cohort.sql",
            "/ddl/results/cohort_censor_stats.sql",
            "/ddl/results/cohort_inclusion.sql",
            "/ddl/results/cohort_inclusion_result.sql",
            "/ddl/results/cohort_inclusion_stats.sql",
            "/ddl/results/cohort_summary_stats.sql",
            // cohort generation cache
            "/ddl/results/cohort_cache.sql",
            "/ddl/results/cohort_censor_stats_cache.sql",
            "/ddl/results/cohort_inclusion_result_cache.sql",
            "/ddl/results/cohort_inclusion_stats_cache.sql",
            "/ddl/results/cohort_summary_stats_cache.sql",
            // cohort feasibility analysis
            "/ddl/results/feas_study_inclusion_stats.sql",
            "/ddl/results/feas_study_index_stats.sql",
            "/ddl/results/feas_study_result.sql",
            // cohort reports (heracles)
            "/ddl/results/heracles_analysis.sql",
            "/ddl/results/heracles_heel_results.sql",
            "/ddl/results/heracles_results.sql",
            "/ddl/results/heracles_results_dist.sql",
            "/ddl/results/heracles_periods.sql",
            // cohort sampling
            "/ddl/results/cohort_sample_element.sql",
            // incidence rates
            "/ddl/results/ir_analysis_dist.sql",
            "/ddl/results/ir_analysis_result.sql",
            "/ddl/results/ir_analysis_strata_stats.sql",
            "/ddl/results/ir_strata.sql",
            // characterization
            "/ddl/results/cohort_characterizations.sql",
            // pathways
            "/ddl/results/pathway_analysis_codes.sql",
            "/ddl/results/pathway_analysis_events.sql",
            "/ddl/results/pathway_analysis_paths.sql",
            "/ddl/results/pathway_analysis_stats.sql"
    );
    private static final String INIT_HERACLES_PERIODS = "/ddl/results/init_heracles_periods.sql";
    // Data-seeding scripts run after table creation; Hive gets its own variant below.
    public static final Collection<String> RESULT_INIT_FILE_PATHS = Arrays.asList(
            "/ddl/results/init_heracles_analysis.sql", INIT_HERACLES_PERIODS
    );
    public static final Collection<String> HIVE_RESULT_INIT_FILE_PATHS = Arrays.asList(
            "/ddl/results/init_hive_heracles_analysis.sql", INIT_HERACLES_PERIODS
    );
    // Optional scripts that (re)build and seed the concept hierarchy table.
    public static final Collection<String> INIT_CONCEPT_HIERARCHY_FILE_PATHS = Arrays.asList(
            "/ddl/results/concept_hierarchy.sql",
            "/ddl/results/init_concept_hierarchy.sql"
    );
    private static final Collection<String> RESULT_INDEX_FILE_PATHS = Arrays.asList(
            "/ddl/results/create_index.sql",
            "/ddl/results/pathway_analysis_events_indexes.sql"
    );
    // Common Evidence Model (CEM) results: DDL only; no init or index scripts exist yet.
    private static final Collection<String> CEMRESULT_DDL_FILE_PATHS = Arrays.asList(
            "/ddl/cemresults/nc_results.sql"
    );
    public static final Collection<String> CEMRESULT_INIT_FILE_PATHS = Arrays.asList();
    private static final Collection<String> CEMRESULT_INDEX_FILE_PATHS = Arrays.asList();
    private static final Collection<String> ACHILLES_DDL_FILE_PATHS = Arrays.asList(
            "/ddl/achilles/achilles_result_concept_count.sql"
    );
    // Dialects for which index-creation scripts are skipped entirely.
    private static final Collection<String> DBMS_NO_INDEXES = Arrays.asList("redshift", "impala", "netezza", "spark");
    /**
     * Get DDL for results schema
     * @param dialect SQL dialect (e.g. sql server); when null the untranslated OHDSI SQL is returned
     * @param vocabSchema name substituted for the {@code vocab_schema} placeholder
     * @param resultSchema name substituted for the {@code results_schema} placeholder
     * @param initConceptHierarchy whether to include the concept-hierarchy build/seed scripts
     * @param tempSchema Oracle temp schema; falls back to {@code resultSchema} when absent
     * @return SQL to create tables in results schema
     */
    @GET
    @Path("results")
    @Produces("text/plain")
    public String generateResultSQL(
            @QueryParam("dialect") String dialect,
            @DefaultValue("vocab") @QueryParam("vocabSchema") String vocabSchema,
            @DefaultValue("results") @QueryParam("schema") String resultSchema,
            @DefaultValue("true") @QueryParam("initConceptHierarchy") Boolean initConceptHierarchy,
            @QueryParam("tempSchema") String tempSchema) {
        Collection<String> resultDDLFilePaths = new ArrayList<>(RESULT_DDL_FILE_PATHS);
        if (initConceptHierarchy) {
            resultDDLFilePaths.addAll(INIT_CONCEPT_HIERARCHY_FILE_PATHS);
        }
        // Use the results schema for Oracle temp tables when no temp schema is supplied.
        String oracleTempSchema = ObjectUtils.firstNonNull(tempSchema, resultSchema);
        Map<String, String> params = new HashMap<String, String>() {{
            put(VOCAB_SCHEMA, vocabSchema);
            put(RESULTS_SCHEMA, resultSchema);
            put(TEMP_SCHEMA, oracleTempSchema);
        }};
        return generateSQL(dialect, params, resultDDLFilePaths, getResultInitFilePaths(dialect), RESULT_INDEX_FILE_PATHS);
    }
    // Hive uses its own heracles_analysis seed script; every other dialect shares one.
    private Collection<String> getResultInitFilePaths(String dialect) {
        if (Objects.equals(DBMSType.HIVE.getOhdsiDB(), dialect)) {
            return HIVE_RESULT_INIT_FILE_PATHS;
        } else {
            return RESULT_INIT_FILE_PATHS;
        }
    }
    /**
     * Get DDL for Common Evidence Model results schema
     * @param dialect SQL dialect; when null the untranslated OHDSI SQL is returned
     * @param schema name substituted for the {@code cem_results_schema} placeholder
     * @return SQL
     */
    @GET
    @Path("cemresults")
    @Produces("text/plain")
    public String generateCemResultSQL(@QueryParam("dialect") String dialect, @DefaultValue("cemresults") @QueryParam("schema") String schema) {
        Map<String, String> params = new HashMap<String, String>() {{
            put(CEM_SCHEMA, schema);
        }};
        return generateSQL(dialect, params, CEMRESULT_DDL_FILE_PATHS, CEMRESULT_INIT_FILE_PATHS, CEMRESULT_INDEX_FILE_PATHS);
    }
    /**
     * Get DDL for Achilles results tables
     * @param dialect SQL dialect; when null the untranslated OHDSI SQL is returned
     * @param vocabSchema OMOP vocabulary schema
     * @param resultSchema results schema
     * @return SQL
     */
    @GET
    @Path("achilles")
    @Produces("text/plain")
    public String generateAchillesSQL(
            @QueryParam("dialect") String dialect,
            @DefaultValue("vocab") @QueryParam("vocabSchema") String vocabSchema,
            @DefaultValue("results") @QueryParam("schema") String resultSchema) {
        final Collection<String> achillesDDLFilePaths = new ArrayList<>(ACHILLES_DDL_FILE_PATHS);
        Map<String, String> params = new HashMap<String, String>() {{
            put(VOCAB_SCHEMA, vocabSchema);
            put(RESULTS_SCHEMA, resultSchema);
        }};
        return generateSQL(dialect, params, achillesDDLFilePaths, Collections.emptyList(), Collections.emptyList());
    }
    /**
     * Concatenates DDL, init, and (dialect permitting) index scripts from the classpath,
     * translates the combined SQL when a dialect is given, and re-inserts newlines after
     * each statement terminator for readability.
     */
    private String generateSQL(String dialect, Map<String, String> params, Collection<String> filePaths, Collection<String> initFilePaths, Collection<String> indexFilePaths) {
        StringBuilder sqlBuilder = new StringBuilder();
        for (String fileName : filePaths) {
            sqlBuilder.append("\n").append(ResourceHelper.GetResourceAsString(fileName));
        }
        for (String fileName : initFilePaths) {
            sqlBuilder.append("\n").append(ResourceHelper.GetResourceAsString(fileName));
        }
        // Skip index scripts for engines that do not support indexes (see DBMS_NO_INDEXES).
        if (dialect == null || DBMS_NO_INDEXES.stream().noneMatch(dbms -> dbms.equals(dialect.toLowerCase()))) {
            for (String fileName : indexFilePaths) {
                sqlBuilder.append("\n").append(ResourceHelper.GetResourceAsString(fileName));
            }
        }
        String result = sqlBuilder.toString();
        if (dialect != null) {
            result = translateSqlFile(result, dialect, params);
        }
        return result.replaceAll(";", ";\n");
    }
    // Renders placeholders and translates the combined script to the requested dialect.
    private String translateSqlFile(String sql, String dialect, Map<String, String> params) {
        SourceStatement statement = new SourceStatement();
        statement.setTargetDialect(dialect.toLowerCase());
        statement.setOracleTempSchema(params.get(TEMP_SCHEMA));
        statement.setSql(sql);
        statement.getParameters().putAll(params);
        TranslatedStatement translatedStatement = translateSQL(statement);
        return translatedStatement.getTargetSQL();
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/ShinyService.java | src/main/java/org/ohdsi/webapi/service/ShinyService.java | package org.ohdsi.webapi.service;
import com.odysseusinc.arachne.commons.api.v1.dto.CommonAnalysisType;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.webapi.shiny.ApplicationBrief;
import org.ohdsi.webapi.shiny.PackagingStrategies;
import org.ohdsi.webapi.shiny.PackagingStrategy;
import org.ohdsi.webapi.shiny.ShinyPackagingService;
import org.ohdsi.webapi.shiny.ShinyPublishedEntity;
import org.ohdsi.webapi.shiny.ShinyPublishedRepository;
import org.ohdsi.webapi.shiny.TemporaryFile;
import org.ohdsi.webapi.shiny.posit.PositConnectClient;
import org.ohdsi.webapi.shiny.posit.TagMapper;
import org.ohdsi.webapi.shiny.posit.dto.AddTagRequest;
import org.ohdsi.webapi.shiny.posit.dto.ContentItemResponse;
import org.ohdsi.webapi.shiny.posit.dto.TagMetadata;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.shiro.PermissionManager;
import org.ohdsi.webapi.shiro.management.Security;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import javax.inject.Inject;
import javax.ws.rs.NotFoundException;
import java.sql.Date;
import java.text.MessageFormat;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Packages analyses as Shiny applications and publishes them to a Posit Connect
 * server. Active only when the {@code shiny.enabled} property is {@code true}.
 */
@Component
@ConditionalOnProperty(name = "shiny.enabled", havingValue = "true")
public class ShinyService {
    private static final Logger log = LoggerFactory.getLogger(ShinyService.class);
    // Packaging services keyed by the analysis type each one supports.
    private final Map<CommonAnalysisType, ShinyPackagingService> servicesMap;
    @Autowired
    private ShinyPublishedRepository shinyPublishedRepository;
    @Autowired
    private PermissionManager permissionManager;
    @Autowired
    private PositConnectClient connectClient;
    @Autowired
    protected Security security;
    @Autowired
    protected UserRepository userRepository;
    @Autowired
    private TagMapper tagMapper;
    // True unless the DisabledSecurity provider is configured.
    @Value("#{!'${security.provider}'.equals('DisabledSecurity')}")
    private boolean securityEnabled;
    @Inject
    public ShinyService(List<ShinyPackagingService> services) {
        servicesMap = services.stream().collect(Collectors.toMap(ShinyPackagingService::getType, Function.identity()));
    }
    /**
     * Packages the analysis as a tar.gz bundle and deploys it to Posit Connect,
     * reusing an existing content item when one is already recorded or found by name.
     */
    public void publishApp(String type, int id, String sourceKey) {
        TemporaryFile data = packageShinyApp(type, id, sourceKey, PackagingStrategies.targz());
        ShinyPublishedEntity publication = getPublication(id, sourceKey);
        ShinyPackagingService service = findShinyService(CommonAnalysisType.valueOf(type.toUpperCase()));
        UUID contentId = Optional.ofNullable(publication.getContentId())
                .orElseGet(() -> findOrCreateItem(service.getBrief(id, sourceKey)));
        String bundleId = connectClient.uploadBundle(contentId, data);
        String taskId = connectClient.deployBundle(contentId, bundleId);
        enrichPublicationWithAtlasTag(contentId, type);
        log.debug("Bundle [{}] is deployed to Shiny server, task id: [{}]", id, taskId);
    }
    // Best-effort: tags the deployed content with the Posit tag mapped to the analysis
    // type. Failures are logged and swallowed so they do not fail the publish itself.
    private void enrichPublicationWithAtlasTag(UUID contentId, String type) {
        try {
            String expectedPositTagName = tagMapper.getPositTagNameForAnalysisType(CommonAnalysisType.valueOf(type.toUpperCase()));
            List<TagMetadata> existingPositTags = connectClient.listTags();
            log.info("Resolved [{}] tags from Posit server, enriching contentId [{}] of type [{}] and expected Posit tag name [{}]", existingPositTags.size(), contentId, type, expectedPositTagName);
            TagMetadata tagMetadata = existingPositTags.stream()
                    .filter(Objects::nonNull)
                    .filter(metadata -> Objects.nonNull(metadata.getName()))
                    .filter(metadata -> StringUtils.trim(metadata.getName()).equals(StringUtils.trim(expectedPositTagName)))
                    .findFirst()
                    .orElseThrow(() -> new IllegalStateException(String.format("Could not find tag metadata on Posit server for expected tag name: %s and type: %s", expectedPositTagName, type)));
            log.debug("Resolved tag metadata for Posit tag: {}, tag id: {}", tagMetadata.getName(), tagMetadata.getId());
            connectClient.addTagToContent(contentId, new AddTagRequest(tagMetadata.getId()));
        } catch (Exception e) {
            log.error("Could not enrich the published contentId {} of type {} with an atlas tag", contentId, type, e);
        }
    }
    // Reuses an existing Posit content item with the same name, else creates one.
    private UUID findOrCreateItem(ApplicationBrief brief) {
        Optional<UUID> contentItemUUID = fetchContentItemUUIDIfExists(brief.getName());
        if (contentItemUUID.isPresent()) {
            log.info("Content item [{}] already exists, will update", brief.getName());
            return contentItemUUID.get();
        } else {
            return connectClient.createContentItem(brief);
        }
    }
    private Optional<UUID> fetchContentItemUUIDIfExists(String itemName) {
        return connectClient.listContentItems().stream()
                .filter(i -> Objects.equals(i.getName(), itemName))
                .findFirst()
                .map(ContentItemResponse::getGuid);
    }
    // Loads the stored publication record, or builds a fresh (unsaved) one attributed
    // to the current user; falls back to the Shiro subject when security is disabled.
    private ShinyPublishedEntity getPublication(int id, String sourceKey) {
        return shinyPublishedRepository.findByAnalysisIdAndSourceKey(Integer.toUnsignedLong(id), sourceKey).orElseGet(() -> {
            ShinyPublishedEntity entity = new ShinyPublishedEntity();
            entity.setAnalysisId(Integer.toUnsignedLong(id));
            entity.setSourceKey(sourceKey);
            entity.setCreatedBy(securityEnabled ? permissionManager.getCurrentUser() : userRepository.findByLogin(security.getSubject()));
            entity.setCreatedDate(Date.from(Instant.now()));
            return entity;
        });
    }
    /**
     * Builds the Shiny application package for the given analysis using the
     * type-specific packaging service and the supplied packaging strategy.
     */
    public TemporaryFile packageShinyApp(String type, int id, String sourceKey, PackagingStrategy packaging) {
        CommonAnalysisType analysisType = CommonAnalysisType.valueOf(type.toUpperCase());
        ShinyPackagingService service = findShinyService(analysisType);
        return service.packageApp(id, sourceKey, packaging);
    }
    // Resolves the packaging service for the analysis type; 404 when unsupported.
    private ShinyPackagingService findShinyService(CommonAnalysisType type) {
        return Optional.ofNullable(servicesMap.get(type))
                .orElseThrow(() -> new NotFoundException(MessageFormat.format("Shiny application download is not supported for [{0}] analyses.", type)));
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/SqlRenderService.java | src/main/java/org/ohdsi/webapi/service/SqlRenderService.java | package org.ohdsi.webapi.service;
import static org.ohdsi.webapi.Constants.DEFAULT_DIALECT;
import static org.ohdsi.webapi.Constants.SqlSchemaPlaceholders.TEMP_DATABASE_SCHEMA_PLACEHOLDER;
import java.util.Collections;
import java.util.Map;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.sqlrender.SourceStatement;
import org.ohdsi.webapi.sqlrender.TranslatedStatement;
import org.ohdsi.webapi.util.SessionUtils;
/**
*
* @author Lee Evans
*/
@Path("/sqlrender/")
public class SqlRenderService {
    /**
     * Translate an OHDSI SQL to a supported target SQL dialect
     * @param sourceStatement JSON with parameters, source SQL, and target dialect
     * @return rendered and translated SQL
     */
    @Path("translate")
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public TranslatedStatement translateSQLFromSourceStatement(SourceStatement sourceStatement) {
        if (sourceStatement == null) {
            return new TranslatedStatement();
        }
        // Force the placeholder temp schema so callers cannot inject their own.
        sourceStatement.setOracleTempSchema(TEMP_DATABASE_SCHEMA_PLACEHOLDER);
        return translatedStatement(sourceStatement);
    }
    public TranslatedStatement translatedStatement(SourceStatement sourceStatement) {
        return translateSQL(sourceStatement);
    }
    /**
     * Renders the statement's parameter placeholders and translates the result to the
     * requested dialect. Returns an empty result for a null statement; any render or
     * translate failure is rethrown as a RuntimeException.
     */
    public static TranslatedStatement translateSQL(SourceStatement sourceStatement) {
        TranslatedStatement result = new TranslatedStatement();
        if (sourceStatement == null) {
            return result;
        }
        try {
            Map<String, String> params = sourceStatement.getParameters();
            if (params == null) {
                params = Collections.emptyMap();
            }
            String[] names = params.keySet().toArray(new String[0]);
            String[] values = params.values().toArray(new String[0]);
            String rendered = SqlRender.renderSql(sourceStatement.getSql(), names, values);
            result.setTargetSQL(translateSql(sourceStatement, rendered));
            return result;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    // Translation is a no-op when no dialect is given or it is already the default.
    private static String translateSql(SourceStatement sourceStatement, String renderedSQL) {
        String dialect = sourceStatement.getTargetDialect();
        if (StringUtils.isEmpty(dialect) || DEFAULT_DIALECT.equals(dialect)) {
            return renderedSQL;
        }
        return SqlTranslate.translateSql(renderedSQL, dialect, SessionUtils.sessionId(), sourceStatement.getOracleTempSchema());
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/CohortAnalysisService.java | src/main/java/org/ohdsi/webapi/service/CohortAnalysisService.java | package org.ohdsi.webapi.service;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.List;
import java.util.stream.Collectors;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import org.ohdsi.sql.SqlSplit;
import org.ohdsi.webapi.cohortanalysis.*;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.cohortresults.VisualizationDataRepository;
import org.ohdsi.webapi.job.GeneratesNotification;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.job.JobTemplate;
import org.ohdsi.webapi.model.results.Analysis;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.ohdsi.webapi.util.SessionUtils;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
/**
* REST Services related to running
* cohort analysis (a.k.a Heracles) analyses.
* More information on the Heracles project
* can be found at {@link https://www.ohdsi.org/web/wiki/doku.php?id=documentation:software:heracles}.
* The implementation found in WebAPI represents a migration of the functionality
* from the stand-alone HERACLES application to integrate it into WebAPI and
* ATLAS.
*
* @summary Cohort Analysis (a.k.a Heracles)
*/
@Path("/cohortanalysis/")
@Component
public class CohortAnalysisService extends AbstractDaoService implements GeneratesNotification {
    // Spring Batch job name under which cohort analysis tasks are launched.
    public static final String NAME = "cohortAnalysisJob";
    // Counts below this threshold are suppressed in results (privacy protection).
    @Value("${heracles.smallcellcount}")
    private String smallCellCount;
    private final JobTemplate jobTemplate;
    private final CohortDefinitionService definitionService;
    private final CohortDefinitionRepository cohortDefinitionRepository;
    private final VisualizationDataRepository visualizationDataRepository;
    private final HeraclesQueryBuilder heraclesQueryBuilder;
    private ObjectMapper objectMapper;
    // Maps a result-set row onto the shared Analysis fields (see mapAnalysis).
    private final RowMapper<Analysis> analysisMapper = new RowMapper<Analysis>() {
        @Override
        public Analysis mapRow(ResultSet rs, int rowNum) throws SQLException {
            Analysis analysis = new Analysis();
            mapAnalysis(analysis, rs, rowNum);
            return analysis;
        }
    };
    // Extends the base mapping with per-cohort completion/update columns.
    private final RowMapper<CohortAnalysis> cohortAnalysisMapper = new RowMapper<CohortAnalysis>() {
        @Override
        public CohortAnalysis mapRow(final ResultSet rs, final int rowNum) throws SQLException {
            final CohortAnalysis cohortAnalysis = new CohortAnalysis();
            mapAnalysis(cohortAnalysis, rs, rowNum);
            cohortAnalysis.setAnalysisComplete(rs.getInt(CohortAnalysis.ANALYSIS_COMPLETE) == 1);
            cohortAnalysis.setCohortDefinitionId(rs.getInt(CohortAnalysis.COHORT_DEFINITION_ID));
            cohortAnalysis.setLastUpdateTime(rs.getTimestamp(CohortAnalysis.LAST_UPDATE_TIME));
            return cohortAnalysis;
        }
    };
    public CohortAnalysisService(JobTemplate jobTemplate,
                                 CohortDefinitionService definitionService,
                                 CohortDefinitionRepository cohortDefinitionRepository,
                                 VisualizationDataRepository visualizationDataRepository,
                                 ObjectMapper objectMapper,
                                 HeraclesQueryBuilder heraclesQueryBuilder) {
        this.jobTemplate = jobTemplate;
        this.definitionService = definitionService;
        this.cohortDefinitionRepository = cohortDefinitionRepository;
        this.visualizationDataRepository = visualizationDataRepository;
        this.objectMapper = objectMapper;
        this.heraclesQueryBuilder = heraclesQueryBuilder;
    }
    // Copies the analysis identity/name/strata columns shared by both row mappers.
    private void mapAnalysis(final Analysis analysis, final ResultSet rs, final int rowNum) throws SQLException {
        analysis.setAnalysisId(rs.getInt(Analysis.ANALYSIS_ID));
        analysis.setAnalysisName(rs.getString(Analysis.ANALYSIS_NAME));
        analysis.setStratum1Name(rs.getString(Analysis.STRATUM_1_NAME));
        analysis.setStratum2Name(rs.getString(Analysis.STRATUM_2_NAME));
        analysis.setStratum3Name(rs.getString(Analysis.STRATUM_3_NAME));
        analysis.setStratum4Name(rs.getString(Analysis.STRATUM_4_NAME));
        analysis.setStratum5Name(rs.getString(Analysis.STRATUM_5_NAME));
        analysis.setAnalysisType(rs.getString(Analysis.ANALYSIS_TYPE));
    }
    /**
     * Returns all cohort analyses in the WebAPI database
     *
     * @summary Get all cohort analyses
     * @return List of all cohort analyses
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public List<Analysis> getCohortAnalyses() {
        String sqlPath = "/resources/cohortanalysis/sql/getCohortAnalyses.sql";
        String search = "ohdsi_database_schema";
        String replace = getOhdsiSchema();
        PreparedStatementRenderer psr = new PreparedStatementRenderer(null, sqlPath, search, replace);
        return getJdbcTemplate().query(psr.getSql(), psr.getSetter(), this.analysisMapper);
    }
    /**
     * Returns all cohort analyses in the WebAPI database
     * for the given cohort_definition_id
     *
     * @summary Get cohort analyses by cohort ID
     * @param id The cohort definition identifier
     * @return List of all cohort analyses and their statuses
     * for the given cohort_definition_id
     */
    @GET
    @Path("/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public List<CohortAnalysis> getCohortAnalysesForCohortDefinition(@PathParam("id") final int id) {
        String sqlPath = "/resources/cohortanalysis/sql/getCohortAnalysesForCohort.sql";
        String tqName = "ohdsi_database_schema";
        String tqValue = getOhdsiSchema();
        // whitelist(id) guards against SQL injection before the id is bound.
        PreparedStatementRenderer psr = new PreparedStatementRenderer(null, sqlPath, tqName, tqValue, "cohortDefinitionId", whitelist(id), SessionUtils.sessionId());
        return getJdbcTemplate().query(psr.getSql(), psr.getSetter(), this.cohortAnalysisMapper);
    }
    /**
     * Returns the summary for the cohort
     *
     * @summary Cohort analysis summary
     * @param id - the cohort_definition id
     * @return Summary which includes the base cohort_definition, the cohort analyses list and their
     * statuses for this cohort, and a base set of common cohort results that may or may not
     * yet have been ran
     */
    @GET
    @Path("/{id}/summary")
    @Produces(MediaType.APPLICATION_JSON)
    public CohortSummary getCohortSummary(@PathParam("id") final int id) {
        CohortSummary summary = new CohortSummary();
        try {
            summary.setCohortDefinition(this.definitionService.getCohortDefinition(whitelist(id)));
            summary.setAnalyses(this.getCohortAnalysesForCohortDefinition(id));
        } catch (Exception e) {
            // Best effort: return a partially-populated summary rather than failing.
            log.error("unable to get cohort summary", e);
        }
        return summary;
    }
    /**
     * Generates a preview of the cohort analysis SQL used to run
     * the Cohort Analysis Job
     *
     * @summary Cohort analysis SQL preview
     * @param task - the CohortAnalysisTask, be sure to have at least one
     * analysis_id and one cohort_definition id
     * @return - SQL for the given CohortAnalysisTask translated and rendered to
     * the current dialect
     */
    @POST
    @Path("/preview")
    @Produces(MediaType.TEXT_PLAIN)
    @Consumes(MediaType.APPLICATION_JSON)
    public String getRunCohortAnalysisSql(CohortAnalysisTask task) {
        task.setSmallCellCount(Integer.parseInt(this.smallCellCount));
        return heraclesQueryBuilder.buildHeraclesAnalysisQuery(task);
    }
    /**
     * Generates a preview of the cohort analysis SQL to be ran for the Cohort
     * Analysis Job to an array of strings, so that it can be used in batch mode.
     *
     * @summary Run cohort analysis SQL batch
     * @param task The CohortAnalysisTask object
     * @return The SQL statements or NULL if there is no task specified
     */
    public String[] getRunCohortAnalysisSqlBatch(CohortAnalysisTask task) {
        if (task != null) {
            task.setSmallCellCount(Integer.parseInt(this.smallCellCount));
            String sql = this.getRunCohortAnalysisSql(task);
            String[] stmts = null;
            // NOTE: the SQL is only split when debug logging is enabled, so this
            // method returns null at other log levels even when a task is given.
            if (log.isDebugEnabled()) {
                stmts = SqlSplit.splitSql(sql);
                for (int x = 0; x < stmts.length; x++) {
                    log.debug("Split SQL {} : {}", x, stmts[x]);
                }
            }
            return stmts;
        }
        return null;
    }
    /**
     * Queues up a cohort analysis task, that generates and translates SQL for the
     * given cohort definitions, analysis ids and concept ids
     *
     * @summary Queue cohort analysis job
     * @param task The cohort analysis task to be ran
     * @return information about the Cohort Analysis Job
     * @throws Exception
     */
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public JobExecutionResource queueCohortAnalysisJob(CohortAnalysisTask task) throws Exception {
        if (task == null) {
            return null;
        }
        // source key comes from the client, we look it up here and hand it off to the tasklet
        Source source = getSourceRepository().findBySourceKey(task.getSourceKey());
        task.setSource(source);
        task.setSmallCellCount(Integer.parseInt(this.smallCellCount));
        // Job parameter values are capped (see limitJobParams) to satisfy Spring
        // Batch's parameter length limit.
        JobParametersBuilder builder = new JobParametersBuilder();
        builder.addString("sourceKey", source.getSourceKey());
        builder.addString("cohortDefinitionIds", limitJobParams(Joiner.on(",").join(task.getCohortDefinitionIds())));
        builder.addString("analysisIds", limitJobParams(Joiner.on(",").join(task.getAnalysisIds())));
        if (task.getConditionConceptIds() != null && task.getConditionConceptIds().size() > 0) {
            builder.addString("conditionIds", limitJobParams(Joiner.on(",").join(task.getConditionConceptIds())));
        }
        if (task.getDrugConceptIds() != null && task.getDrugConceptIds().size() > 0) {
            builder.addString("drugIds", limitJobParams(Joiner.on(",").join(task.getDrugConceptIds())));
        }
        if (task.getMeasurementConceptIds() != null && task.getMeasurementConceptIds().size() > 0) {
            builder.addString("measurementIds", limitJobParams(Joiner.on(",").join(task.getMeasurementConceptIds())));
        }
        if (task.getObservationConceptIds() != null && task.getObservationConceptIds().size() > 0) {
            builder.addString("observationIds", limitJobParams(Joiner.on(",").join(task.getObservationConceptIds())));
        }
        if (task.getProcedureConceptIds() != null && task.getProcedureConceptIds().size() > 0) {
            builder.addString("procedureIds", limitJobParams(Joiner.on(",").join(task.getProcedureConceptIds())));
        }
        if (task.isRunHeraclesHeel()) {
            builder.addString("heraclesHeel", "true");
        }
        if (task.isCohortPeriodOnly()) {
            builder.addString("cohortPeriodOnly", "true");
        }
        if (!StringUtils.isEmpty(task.getJobName())) {
            builder.addString("jobName", limitJobParams(task.getJobName()));
        }
        // clear analysis IDs from the generated set
        this.getTransactionTemplateRequiresNew().execute(status -> {
            CohortDefinition cohortDef = this.cohortDefinitionRepository.findOne(Integer.parseInt(task.getCohortDefinitionIds().get(0)));
            // Find (or lazily create) the generation info record for this source.
            CohortAnalysisGenerationInfo info = cohortDef.getCohortAnalysisGenerationInfoList().stream()
                    .filter(a -> a.getSourceId() == task.getSource().getSourceId())
                    .findFirst()
                    .orElseGet(() -> {
                        CohortAnalysisGenerationInfo genInfo = new CohortAnalysisGenerationInfo();
                        genInfo.setSourceId(task.getSource().getSourceId());
                        genInfo.setCohortDefinition(cohortDef);
                        cohortDef.getCohortAnalysisGenerationInfoList().add(genInfo);
                        return genInfo;
                    });
            // Requested analyses are removed so they will be regenerated by the job.
            List<Integer> analysisList = task.getAnalysisIds().stream().map(Integer::parseInt).collect(Collectors.toList());
            info.getAnalysisIds().removeAll(analysisList);
            info.setLastExecution(Calendar.getInstance().getTime());
            info.setProgress(0);
            this.cohortDefinitionRepository.save(cohortDef);
            return null;
        });
        //TODO consider analysisId
        final String taskString = task.toString();
        final JobParameters jobParameters = builder.toJobParameters();
        log.info("Beginning run for cohort analysis task: {}", taskString);
        CohortAnalysisTasklet tasklet = new CohortAnalysisTasklet(task, getSourceJdbcTemplate(task.getSource()),
                getTransactionTemplate(), getTransactionTemplateRequiresNew(), this.getSourceDialect(), this.visualizationDataRepository,
                this.cohortDefinitionRepository, objectMapper, heraclesQueryBuilder);
        return this.jobTemplate.launchTasklet(NAME, "cohortAnalysisStep", tasklet, jobParameters);
    }
    @Override
    public String getJobName() {
        return NAME;
    }
    @Override
    public String getExecutionFoldingKey() {
        return "analysisIds";
    }
    // Truncates a job parameter value so it fits Spring Batch's 250-char column.
    private String limitJobParams(String param) {
        if (param.length() >= 250) {
            return param.substring(0, 245) + "...";
        }
        return param;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/CohortResultsService.java | src/main/java/org/ohdsi/webapi/service/CohortResultsService.java | package org.ohdsi.webapi.service;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.webapi.cohortanalysis.CohortAnalysis;
import org.ohdsi.webapi.cohortanalysis.CohortAnalysisTask;
import org.ohdsi.webapi.cohortanalysis.CohortSummary;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.cohortdefinition.dto.CohortDTO;
import org.ohdsi.webapi.cohortresults.*;
import org.ohdsi.webapi.cohortresults.mapper.AnalysisResultsMapper;
import org.ohdsi.webapi.model.results.Analysis;
import org.ohdsi.webapi.model.results.AnalysisResults;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.ohdsi.webapi.util.SessionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.ByteArrayOutputStream;
import java.sql.ResultSetMetaData;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.ws.rs.core.Response;
import org.ohdsi.webapi.person.CohortPerson;
/**
* REST Services related to retrieving
* cohort analysis (a.k.a Heracles Results) analyses results.
* More information on the Heracles project
* can be found at {@link https://www.ohdsi.org/web/wiki/doku.php?id=documentation:software:heracles}.
* The implementation found in WebAPI represents a migration of the functionality
* from the stand-alone HERACLES application to integrate it into WebAPI and
* ATLAS.
*
* @summary Cohort Analysis Results (a.k.a Heracles Results)
*/
@Path("/cohortresults")
@Component
public class CohortResultsService extends AbstractDaoService {
public static final String MIN_COVARIATE_PERSON_COUNT = "10";
public static final String MIN_INTERVAL_PERSON_COUNT = "10";
public static final String BASE_SQL_PATH = "/resources/cohortresults/sql";
@Autowired
private VisualizationDataRepository visualizationDataRepository;
@Autowired
private CohortDefinitionService cohortDefinitionService;
@Autowired
private CohortDefinitionRepository cohortDefinitionRepository;
@Autowired
private ObjectMapper mapper;
private CohortResultsAnalysisRunner queryRunner = null;
@PostConstruct
public void init() {
    // Build the analysis runner once the injected dependencies are available.
    this.queryRunner = new CohortResultsAnalysisRunner(getSourceDialect(), visualizationDataRepository, this.mapper);
}
/**
 * Queries for cohort analysis results for the given cohort definition id. The SQL
 * resource is resolved as {@code /resources/cohortresults/sql/{analysisGroup}/{analysisName}.sql}.
 *
 * @summary Get results for analysis group
 * @param id cohort_definition id
 * @param analysisGroup Name of the analysisGrouping under the
 * /resources/cohortresults/sql/ directory
 * @param analysisName Name of the analysis, currently the same name as the
 * sql file under analysisGroup
 * @param minCovariatePersonCountParam minimum covariate person count; defaults to 10 when null
 * @param minIntervalPersonCountParam minimum interval person count; defaults to 10 when null
 * @param sourceKey the source to retrieve results
 * @return List of key, value pairs, or null if the SQL could not be rendered or executed
 */
@GET
@Path("{sourceKey}/{id}/raw/{analysis_group}/{analysis_name}")
@Produces(MediaType.APPLICATION_JSON)
public List<Map<String, String>> getCohortResultsRaw(@PathParam("id") final int id, @PathParam("analysis_group") final String analysisGroup,
@PathParam("analysis_name") final String analysisName,
@QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
@QueryParam("min_interval_person_count") final Integer minIntervalPersonCountParam,
@PathParam("sourceKey") String sourceKey) {
List<Map<String, String>> results;
String sqlPath = BASE_SQL_PATH + "/" + analysisGroup + "/" + analysisName + ".sql";
Source source = getSourceRepository().findBySourceKey(sourceKey);
try {
PreparedStatementRenderer psr = prepareGetCohortResultsRaw(id, minCovariatePersonCountParam,
minIntervalPersonCountParam, sqlPath, source);
return genericResultSetLoader(psr, source);
} catch (Exception e) {
// NOTE(review): failures are swallowed and surfaced to the client as a null body.
log.error("Unable to translate sql for analysis {}", analysisName, e);
return null;
}
}
/**
 * Builds the parameterized SQL for {@link #getCohortResultsRaw}, substituting the
 * source's vocabulary and results schemas into the .sql resource and falling back
 * to the service-wide minimum person-count defaults when the caller supplied none.
 *
 * @param id cohort definition id bound as {@code cohortDefinitionId}
 * @param minCovariatePersonCountParam optional override for the covariate count floor
 * @param minIntervalPersonCountParam optional override for the interval count floor
 * @param sqlPath classpath location of the SQL resource
 * @param source the CDM source whose schemas are substituted
 * @return a renderer ready for execution against the source
 */
protected PreparedStatementRenderer prepareGetCohortResultsRaw(final int id,
    final Integer minCovariatePersonCountParam,
    final Integer minIntervalPersonCountParam, String sqlPath,
    Source source) {
  // Schema placeholders in the .sql resource and their per-source replacements.
  String[] search = {"cdm_database_schema", "ohdsi_database_schema"};
  String[] replace = {
      source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary),
      source.getTableQualifier(SourceDaimon.DaimonType.Results)};
  // Nulls fall back to the service-wide minimum-count defaults.
  Object minCovariate = (minCovariatePersonCountParam == null) ? MIN_COVARIATE_PERSON_COUNT : minCovariatePersonCountParam;
  Object minInterval = (minIntervalPersonCountParam == null) ? MIN_INTERVAL_PERSON_COUNT : minIntervalPersonCountParam;
  String[] bindNames = {"cohortDefinitionId", "minCovariatePersonCount", "minIntervalPersonCount"};
  Object[] bindValues = {id, minCovariate, minInterval};
  return new PreparedStatementRenderer(source, sqlPath, search, replace, bindNames, bindValues);
}
/**
 * Export the cohort analysis results to a ZIP file containing the raw results and
 * the result distributions (both as CRLF-terminated TSV) plus the cohort
 * definition serialized as JSON.
 *
 * @summary Export cohort analysis results
 * @param id The cohort ID
 * @param sourceKey The source Key
 * @return A response containing the .ZIP file of results
 */
@GET
@Path("{sourceKey}/{id}/export.zip")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response exportCohortResults(@PathParam("id") int id, @PathParam("sourceKey") String sourceKey) {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  // try-with-resources guarantees the zip stream is closed even when a query fails
  // (the original code leaked the open ZipOutputStream on the exception path).
  try (ZipOutputStream zos = new ZipOutputStream(baos)) {
    Source source = getSourceRepository().findBySourceKey(sourceKey);
    String tqName = "tableQualifier";
    String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    // results export
    PreparedStatementRenderer psr = new PreparedStatementRenderer(source,
        BASE_SQL_PATH + "/raw/getAllResults.sql", tqName, tqValue, "cohortDefinitionId", whitelist(id));
    writeZipEntry(zos, "cohort_" + id + "_results.tsv", queryToTsv(psr, source).getBytes());
    // result distribution export
    psr = new PreparedStatementRenderer(source,
        BASE_SQL_PATH + "/raw/getAllResultDistributions.sql", tqName, tqValue, "cohortDefinitionId", whitelist(id));
    writeZipEntry(zos, "cohort_" + id + "_results_dist.tsv", queryToTsv(psr, source).getBytes());
    // include cohort definition in export
    CohortDTO cohortDefinition = cohortDefinitionService.getCohortDefinition(id);
    ByteArrayOutputStream cohortDefinitionStream = new ByteArrayOutputStream();
    mapper.writeValue(cohortDefinitionStream, cohortDefinition);
    cohortDefinitionStream.flush();
    writeZipEntry(zos, "cohort_" + id + "_definition.json", cohortDefinitionStream.toByteArray());
  } catch (Exception ex) {
    throw new RuntimeException(ex);
  }
  return Response
      .ok(baos)
      .type(MediaType.APPLICATION_OCTET_STREAM)
      .build();
}

/**
 * Runs the given prepared statement against the source and renders every row of the
 * result set as one tab-separated, CRLF-terminated line.
 */
private String queryToTsv(PreparedStatementRenderer psr, Source source) {
  final StringBuilder data = new StringBuilder();
  getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), new RowMapper<Void>() {
    @Override
    public Void mapRow(ResultSet rs, int rowNum) throws SQLException {
      ResultSetMetaData metaData = rs.getMetaData();
      int colCount = metaData.getColumnCount();
      for (int i = 1; i <= colCount; i++) {
        if (i > 1) {
          data.append("\t");
        }
        data.append(String.valueOf(rs.getObject(i)));
      }
      data.append("\r\n");
      return null;
    }
  });
  return data.toString();
}

/** Adds a single named entry with the given content to the zip stream. */
private static void writeZipEntry(ZipOutputStream zos, String name, byte[] content) throws java.io.IOException {
  zos.putNextEntry(new ZipEntry(name));
  zos.write(content);
  zos.closeEntry();
}
/**
 * Provides a warmup mechanism for the data visualization cache. This
 * endpoint does not appear to be used and may be a hold over from the
 * original HERACLES implementation.
 *
 * @summary Warmup data visualizations
 * @param task The cohort analysis task (carries the cohort, analyses and source)
 * @return The number of report visualizations warmed
 */
@POST
@Path("/warmup")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public int warmUpVisualizationData(CohortAnalysisTask task) {
// Delegates to the shared runner, executing against the task's own source.
return this.queryRunner.warmupData(this.getSourceJdbcTemplate(task.getSource()), task);
}
/**
 * Provides the set of cohort analysis visualization keys that already have cached
 * (completed) data for the given cohort on the given source.
 *
 * @summary Get completed cohort analysis visualizations
 * @param id The cohort ID
 * @param sourceKey The source key
 * @return A list of visualization keys that are complete
 */
@GET
@Path("{sourceKey}/{id}/completed")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<String> getCompletedVisualiztion(@PathParam("id") final int id,
    @PathParam("sourceKey") final String sourceKey) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  List<VisualizationData> cached = this.visualizationDataRepository.findByCohortDefinitionIdAndSourceId(id, source.getSourceId());
  Set<String> keys = new HashSet<>();
  if (cached != null) {
    for (VisualizationData entry : cached) {
      keys.add(entry.getVisualizationKey());
    }
  }
  return keys;
}
/**
 * Retrieves the tornado plot for a cohort: the tornado records themselves plus the
 * profile sample records that accompany them.
 *
 * @summary Get the tornado plot
 * @param sourceKey The source key
 * @param cohortDefinitionId The cohort definition id
 * @return The tornado plot data
 */
@GET
@Path("{sourceKey}/{id}/tornado")
@Produces(MediaType.APPLICATION_JSON)
public TornadoReport getTornadoReport(@PathParam("sourceKey") final String sourceKey, @PathParam("id") final int cohortDefinitionId) {
  final Source source = getSourceRepository().findBySourceKey(sourceKey);
  final TornadoReport report = new TornadoReport();
  // Both sections of the report are computed by the shared analysis runner.
  report.tornadoRecords = queryRunner.getTornadoRecords(getSourceJdbcTemplate(source), cohortDefinitionId, source);
  report.profileSamples = queryRunner.getProfileSampleRecords(getSourceJdbcTemplate(source), cohortDefinitionId, source);
  return report;
}
/**
 * Queries for cohort analysis dashboard for the given cohort definition id.
 * Served from the visualization cache unless {@code refresh} is requested or no
 * cached entry exists, in which case it is recomputed (and re-cached).
 *
 * @summary Get the dashboard
 * @param id The cohort definition id
 * @param minCovariatePersonCountParam The minimum number of covariates per person
 * @param minIntervalPersonCountParam The minimum interval person count
 * @param demographicsOnly only render gender and age
 * @param sourceKey The source key
 * @param refresh when true, bypass the visualization cache and recompute
 * @return CohortDashboard, or null if cached data could not be deserialized
 */
@GET
@Path("{sourceKey}/{id}/dashboard")
@Produces(MediaType.APPLICATION_JSON)
public CohortDashboard getDashboard(@PathParam("id") final int id,
@QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
@QueryParam("min_interval_person_count") final Integer minIntervalPersonCountParam,
@QueryParam("demographics_only") final boolean demographicsOnly,
@PathParam("sourceKey") final String sourceKey,
@DefaultValue("false") @QueryParam("refresh") boolean refresh) {
final String key = CohortResultsAnalysisRunner.DASHBOARD;
Source source = getSourceRepository().findBySourceKey(sourceKey);
// Cache lookup is skipped entirely when a refresh was requested.
VisualizationData data = refresh ? null : this.visualizationDataRepository.findByCohortDefinitionIdAndSourceIdAndVisualizationKey(id, source.getSourceId(), key);
CohortDashboard dashboard = null;
if (refresh || data == null) {
// Recompute; the trailing 'true' presumably asks the runner to persist into the cache — confirm.
dashboard = queryRunner.getDashboard(getSourceJdbcTemplate(source), id, source,
minCovariatePersonCountParam, minIntervalPersonCountParam, demographicsOnly, true);
} else {
try {
dashboard = mapper.readValue(data.getData(), CohortDashboard.class);
} catch (Exception e) {
// Deserialization failure is logged and null is returned to the client.
log.error(whitelist(e));
}
}
return dashboard;
}
/**
 * Queries for cohort analysis condition treemap results for the given cohort
 * definition id. Served from the visualization cache unless refresh is requested
 * or no cached entry exists.
 *
 * @summary Get condition treemap
 * @param sourceKey The source key
 * @param id The cohort ID
 * @param minCovariatePersonCountParam The minimum number of covariates per person
 * @param minIntervalPersonCountParam The minimum interval person count
 * @param refresh Boolean - refresh visualization data
 * @return List<HierarchicalConceptRecord>, or null when cached data cannot be deserialized
 */
@GET
@Path("{sourceKey}/{id}/condition/")
@Produces(MediaType.APPLICATION_JSON)
public List<HierarchicalConceptRecord> getConditionTreemap(@PathParam("sourceKey") String sourceKey, @PathParam("id") final int id,
    @QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
    @QueryParam("min_interval_person_count") final Integer minIntervalPersonCountParam,
    @DefaultValue("false") @QueryParam("refresh") boolean refresh) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  VisualizationData cached = refresh ? null : this.visualizationDataRepository.findByCohortDefinitionIdAndSourceIdAndVisualizationKey(id, source.getSourceId(), CohortResultsAnalysisRunner.CONDITION);
  if (cached != null) {
    // Cache hit: deserialize the stored payload.
    try {
      return mapper.readValue(cached.getData(), new TypeReference<List<HierarchicalConceptRecord>>() {
      });
    } catch (Exception e) {
      log.error(whitelist(e));
      return null;
    }
  }
  // Cache miss or forced refresh: compute (and persist) the treemap now.
  return this.queryRunner.getConditionTreemap(this.getSourceJdbcTemplate(source), id, minCovariatePersonCountParam, minIntervalPersonCountParam, source, true);
}
/**
 * Get the distinct person count for a cohort, computed directly from the results
 * schema (no visualization cache involved).
 *
 * @summary Get distinct person count
 * @param sourceKey The source key
 * @param id The cohort ID
 * @param refresh Boolean - refresh visualization data (unused by this endpoint)
 * @return Distinct person count, or null when the query returns no rows
 */
@GET
@Path("{sourceKey}/{id}/distinctPersonCount/")
@Produces(MediaType.APPLICATION_JSON)
public Integer getRawDistinctPersonCount(@PathParam("sourceKey") String sourceKey,
    @PathParam("id") String id,
    @DefaultValue("false") @QueryParam("refresh") boolean refresh) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  PreparedStatementRenderer psr = prepareGetRawDistinctPersonCount(id, source);
  // Extract the single scalar from the first row, if any.
  return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), new ResultSetExtractor<Integer>() {
    @Override
    public Integer extractData(ResultSet rs) throws SQLException {
      return rs.next() ? rs.getInt(1) : null;
    }
  });
}
/**
 * Builds the distinct-person-count query, substituting the source's results schema
 * and binding the cohort id (parsed from the path parameter).
 */
protected PreparedStatementRenderer prepareGetRawDistinctPersonCount(String id, Source source) {
  String resultsQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
  return new PreparedStatementRenderer(source, BASE_SQL_PATH + "/raw/getTotalDistinctPeople.sql",
      "tableQualifier", resultsQualifier, "id", Integer.valueOf(id));
}
/**
 * Queries for cohort analysis condition drilldown results for the given
 * cohort definition id and condition id. Served from the visualization cache
 * unless refresh is requested or no cached entry exists.
 *
 * @summary Get condition drilldown report
 * @param sourceKey The source key
 * @param id The cohort ID
 * @param conditionId The condition concept ID
 * @param minCovariatePersonCountParam The minimum number of covariates per person
 * @param minIntervalPersonCountParam The minimum interval person count
 * @param refresh Boolean - refresh visualization data
 * @return The CohortConditionDrilldown detail object, or null if cached data could not be deserialized
 */
@GET
@Path("{sourceKey}/{id}/condition/{conditionId}")
@Produces(MediaType.APPLICATION_JSON)
public CohortConditionDrilldown getConditionResults(@PathParam("sourceKey") String sourceKey,
@PathParam("id") final int id,
@PathParam("conditionId") final int conditionId,
@QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
@QueryParam("min_interval_person_count") final Integer minIntervalPersonCountParam,
@DefaultValue("false") @QueryParam("refresh") boolean refresh) {
CohortConditionDrilldown drilldown = null;
final String key = CohortResultsAnalysisRunner.CONDITION_DRILLDOWN;
Source source = getSourceRepository().findBySourceKey(sourceKey);
// Drilldown cache entries are additionally keyed by the condition concept id.
VisualizationData data = refresh ? null : this.visualizationDataRepository.findByCohortDefinitionIdAndSourceIdAndVisualizationKeyAndDrilldownId(id, source.getSourceId(), key, conditionId);
if (refresh || data == null) {
drilldown = this.queryRunner.getConditionResults(this.getSourceJdbcTemplate(source), id, conditionId, minCovariatePersonCountParam, minIntervalPersonCountParam, source, true);
} else {
try {
drilldown = mapper.readValue(data.getData(), CohortConditionDrilldown.class);
} catch (Exception e) {
log.error(whitelist(e));
}
}
return drilldown;
}
/**
 * Queries for cohort analysis condition era treemap results for the given
 * cohort definition id. Served from the visualization cache unless refresh is
 * requested or no cached entry exists.
 *
 * @param sourceKey The source key
 * @param id The cohort ID
 * @param minCovariatePersonCountParam The minimum number of covariates per person
 * @param minIntervalPersonCountParam The minimum interval person count
 * @param refresh Boolean - refresh visualization data
 * @return List<HierarchicalConceptRecord>, or null when cached data cannot be deserialized
 */
@GET
@Path("{sourceKey}/{id}/conditionera/")
@Produces(MediaType.APPLICATION_JSON)
public List<HierarchicalConceptRecord> getConditionEraTreemap(@PathParam("sourceKey") final String sourceKey,
    @PathParam("id") final int id,
    @QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
    @QueryParam("min_interval_person_count") final Integer minIntervalPersonCountParam,
    @DefaultValue("false") @QueryParam("refresh") boolean refresh) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  VisualizationData cached = refresh ? null : this.visualizationDataRepository.findByCohortDefinitionIdAndSourceIdAndVisualizationKey(id, source.getSourceId(), CohortResultsAnalysisRunner.CONDITION_ERA);
  if (cached != null) {
    // Cache hit: deserialize the stored payload.
    try {
      return mapper.readValue(cached.getData(), new TypeReference<List<HierarchicalConceptRecord>>() {
      });
    } catch (Exception e) {
      log.error(whitelist(e));
      return null;
    }
  }
  // Cache miss or forced refresh: compute (and persist) the treemap now.
  return this.queryRunner.getConditionEraTreemap(this.getSourceJdbcTemplate(source), id, minCovariatePersonCountParam, minIntervalPersonCountParam, source, true);
}
/**
 * Get the completed analyses IDs for the selected cohort and source key. Runs
 * against the WebAPI (OHDSI) database, not the CDM source.
 *
 * @summary Get completed analyses IDs
 * @param sourceKey The source key
 * @param id The cohort ID
 * @return A list of completed analysis IDs
 */
@GET
@Path("{sourceKey}/{id}/analyses")
@Produces(MediaType.APPLICATION_JSON)
public List<Integer> getCompletedAnalyses(@PathParam("sourceKey") String sourceKey, @PathParam("id") String id) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  PreparedStatementRenderer psr = prepareGetCompletedAnalysis(id, source.getSourceId());
  // Each row carries a single analysis id in its first column.
  RowMapper<Integer> firstColumn = new RowMapper<Integer>() {
    @Override
    public Integer mapRow(ResultSet resultSet, int rowNum) throws SQLException {
      return resultSet.getInt(1);
    }
  };
  return this.getJdbcTemplate().query(psr.getSql(), psr.getSetter(), firstColumn);
}
/**
 * Transport object describing cohort-analysis generation progress for a single
 * source: which source, which analysis, and how far along it is.
 */
class GenerationInfoDTO {
  private String sourceKey;
  private Integer analysisId;
  private Integer progress;

  /** No-arg constructor for JSON (de)serialization. */
  public GenerationInfoDTO() {
  }

  public GenerationInfoDTO(String sourceKey, Integer analysisId, Integer progress) {
    this.sourceKey = sourceKey;
    this.analysisId = analysisId;
    this.progress = progress;
  }

  public String getSourceKey() {
    return this.sourceKey;
  }

  public Integer getAnalysisId() {
    return this.analysisId;
  }

  public Integer getProgress() {
    return this.progress;
  }

  public void setSourceKey(String sourceKey) {
    this.sourceKey = sourceKey;
  }

  public void setAnalysisId(Integer analysisId) {
    this.analysisId = analysisId;
  }

  public void setProgress(Integer progress) {
    this.progress = progress;
  }
}
/**
 * Get the analysis generation progress for the given cohort on the given source.
 *
 * @summary Get analysis progress
 * @param sourceKey The source key
 * @param id The cohort ID
 * @return The generation progress information
 * @throws NotFoundException when no generation info exists for that source
 */
@GET
@Path("{sourceKey}/{id}/info")
@Produces(MediaType.APPLICATION_JSON)
public GenerationInfoDTO getAnalysisProgress(@PathParam("sourceKey") String sourceKey, @PathParam("id") Integer id) {
// NOTE(review): runs in a fresh transaction, presumably so the generation-info
// collection can be lazily initialized inside an open session — confirm.
return getTransactionTemplateRequiresNew().execute(status -> {
org.ohdsi.webapi.cohortdefinition.CohortDefinition def = cohortDefinitionRepository.findOne(id);
Source source = getSourceRepository().findBySourceKey(sourceKey);
// Pick the generation info entry matching this source, or 404.
return def.getCohortAnalysisGenerationInfoList().stream()
.filter(cd -> Objects.equals(cd.getSourceId(), source.getSourceId()))
.findFirst().map(gen -> new GenerationInfoDTO(sourceKey, id, gen.getProgress()))
.<RuntimeException>orElseThrow(NotFoundException::new);
});
}
/**
 * Builds the completed-analyses query against the OHDSI (WebAPI) schema, binding
 * the cohort definition id and the source id.
 */
protected PreparedStatementRenderer prepareGetCompletedAnalysis(String id, int sourceId) {
  String sqlPath = BASE_SQL_PATH + "/raw/getCompletedAnalyses.sql";
  String[] search = {"tableQualifier"};
  String[] replace = {this.getOhdsiSchema()};
  String[] bindNames = {"cohort_definition_id", "source_id"};
  Object[] bindValues = {Integer.valueOf(id), Integer.valueOf(sourceId)};
  return new PreparedStatementRenderer(getSourceRepository().findBySourceId(sourceId),
      sqlPath, search, replace, bindNames, bindValues);
}
/**
 * Queries for cohort analysis condition era drilldown results for the given
 * cohort definition id and condition id. Served from the visualization cache
 * unless refresh is requested or no cached entry exists.
 *
 * @summary Get condition era drilldown report
 * @param id The cohort ID
 * @param conditionId The condition ID
 * @param minCovariatePersonCountParam The minimum number of covariates per person
 * @param minIntervalPersonCountParam The minimum interval person count
 * @param sourceKey The source key
 * @param refresh Boolean - refresh visualization data
 * @return The CohortConditionEraDrilldown object, or null if cached data could not be deserialized
 */
@GET
@Path("{sourceKey}/{id}/conditionera/{conditionId}")
@Produces(MediaType.APPLICATION_JSON)
public CohortConditionEraDrilldown getConditionEraDrilldown(@PathParam("id") final int id,
@PathParam("conditionId") final int conditionId,
@QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
@QueryParam("min_interval_person_count") final Integer minIntervalPersonCountParam,
@PathParam("sourceKey") final String sourceKey,
@DefaultValue("false") @QueryParam("refresh") boolean refresh) {
CohortConditionEraDrilldown drilldown = null;
final String key = CohortResultsAnalysisRunner.CONDITION_ERA_DRILLDOWN;
Source source = getSourceRepository().findBySourceKey(sourceKey);
// Drilldown cache entries are additionally keyed by the condition id.
VisualizationData data = refresh ? null : this.visualizationDataRepository
.findByCohortDefinitionIdAndSourceIdAndVisualizationKeyAndDrilldownId(id, source.getSourceId(), key, conditionId);
if (refresh || data == null) {
drilldown = this.queryRunner.getConditionEraDrilldown(this.getSourceJdbcTemplate(source), id, conditionId, minCovariatePersonCountParam, minIntervalPersonCountParam, source, true);
} else {
try {
drilldown = mapper.readValue(data.getData(), CohortConditionEraDrilldown.class);
} catch (Exception e) {
log.error(whitelist(e));
}
}
return drilldown;
}
/**
 * Queries for drug analysis treemap results for the given cohort definition id.
 * Served from the visualization cache unless refresh is requested or no cached
 * entry exists.
 *
 * @summary Get drug treemap
 * @param id The cohort ID
 * @param minCovariatePersonCountParam The minimum number of covariates per person
 * @param minIntervalPersonCountParam The minimum interval person count
 * @param sourceKey The source key
 * @param refresh Boolean - refresh visualization data
 * @return List<HierarchicalConceptRecord>, or null when cached data cannot be deserialized
 */
@GET
@Path("{sourceKey}/{id}/drug/")
@Produces(MediaType.APPLICATION_JSON)
public List<HierarchicalConceptRecord> getDrugTreemap(@PathParam("id") final int id,
    @QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
    @QueryParam("min_interval_person_count") final Integer minIntervalPersonCountParam,
    @PathParam("sourceKey") final String sourceKey,
    @DefaultValue("false") @QueryParam("refresh") boolean refresh) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  VisualizationData cached = refresh ? null : this.visualizationDataRepository.findByCohortDefinitionIdAndSourceIdAndVisualizationKey(id, source.getSourceId(), CohortResultsAnalysisRunner.DRUG);
  if (cached != null) {
    // Cache hit: deserialize the stored payload.
    try {
      return mapper.readValue(cached.getData(), new TypeReference<List<HierarchicalConceptRecord>>() {
      });
    } catch (Exception e) {
      log.error(whitelist(e));
      return null;
    }
  }
  // Cache miss or forced refresh: compute (and persist) the treemap now.
  return this.queryRunner.getDrugTreemap(this.getSourceJdbcTemplate(source), id, minCovariatePersonCountParam, minIntervalPersonCountParam, source, true);
}
/**
 * Queries for cohort analysis drug drilldown results for the given cohort
 * definition id and drug id. Served from the visualization cache unless refresh
 * is requested or no cached entry exists.
 *
 * @summary Get drug drilldown report
 * @param id The cohort ID (cohort_definition id)
 * @param drugId The drug concept ID (from concept)
 * @param minCovariatePersonCountParam The minimum number of covariates per person
 * @param minIntervalPersonCountParam The minimum interval person count
 * @param sourceKey The source key
 * @param refresh Boolean - refresh visualization data
 * @return The CohortDrugDrilldown detail object, or null if cached data could not be deserialized
 */
@GET
@Path("{sourceKey}/{id}/drug/{drugId}")
@Produces(MediaType.APPLICATION_JSON)
public CohortDrugDrilldown getDrugResults(@PathParam("id") final int id, @PathParam("drugId") final int drugId,
@QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
@QueryParam("min_interval_person_count") final Integer minIntervalPersonCountParam,
@PathParam("sourceKey") final String sourceKey,
@DefaultValue("false") @QueryParam("refresh") boolean refresh) {
CohortDrugDrilldown drilldown = null;
final String key = CohortResultsAnalysisRunner.DRUG_DRILLDOWN;
Source source = getSourceRepository().findBySourceKey(sourceKey);
// Drilldown cache entries are additionally keyed by the drug concept id.
VisualizationData data = refresh ? null : this.visualizationDataRepository.findByCohortDefinitionIdAndSourceIdAndVisualizationKeyAndDrilldownId(id, source.getSourceId(), key, drugId);
if (refresh || data == null) {
drilldown = this.queryRunner.getDrugResults(this.getSourceJdbcTemplate(source), id, drugId, minCovariatePersonCountParam, minIntervalPersonCountParam, source, true);
} else {
try {
drilldown = mapper.readValue(data.getData(), CohortDrugDrilldown.class);
} catch (Exception e) {
log.error(whitelist(e));
}
}
return drilldown;
}
/**
 * Queries for cohort analysis drug era treemap results for the given cohort
 * definition id. Served from the visualization cache unless refresh is requested
 * or no cached entry exists.
 *
 * @summary Get drug era treemap report
 * @param id The cohort ID
 * @param minCovariatePersonCountParam The minimum number of covariates per person
 * @param minIntervalPersonCountParam The minimum interval person count
 * @param sourceKey The source key
 * @param refresh Boolean - refresh visualization data
 * @return List<HierarchicalConceptRecord>, or null when cached data cannot be deserialized
 */
@GET
@Path("{sourceKey}/{id}/drugera/")
@Produces(MediaType.APPLICATION_JSON)
public List<HierarchicalConceptRecord> getDrugEraTreemap(@PathParam("id") final int id,
    @QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
    @QueryParam("min_interval_person_count") final Integer minIntervalPersonCountParam,
    @PathParam("sourceKey") final String sourceKey,
    @DefaultValue("false") @QueryParam("refresh") boolean refresh) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  VisualizationData cached = refresh ? null : this.visualizationDataRepository.findByCohortDefinitionIdAndSourceIdAndVisualizationKey(id, source.getSourceId(), CohortResultsAnalysisRunner.DRUG_ERA);
  if (cached != null) {
    // Cache hit: deserialize the stored payload.
    try {
      return mapper.readValue(cached.getData(), new TypeReference<List<HierarchicalConceptRecord>>() {
      });
    } catch (Exception e) {
      log.error(whitelist(e));
      return null;
    }
  }
  // Cache miss or forced refresh: compute (and persist) the treemap now.
  return this.queryRunner.getDrugEraTreemap(this.getSourceJdbcTemplate(source), id, minCovariatePersonCountParam, minIntervalPersonCountParam, source, true);
}
/**
*
* @param id cohort_defintion id
* @param drugId drug_id (from concept)
* @return CohortDrugEraDrilldown
*/
/**
* Queries for cohort analysis drug era drilldown results for the given cohort
* definition id and drug id
*
* @summary Get drug era drilldown report
* @param id The cohort ID
* @param drugId The drug concept ID
* @param minCovariatePersonCountParam The minimum number of covariates per person
* @param minIntervalPersonCountParam The minimum interval person count
* @param sourceKey The source key
* @param refresh Boolean - refresh visualization data
* @return CohortDrugEraDrilldown
*/
@GET
@Path("{sourceKey}/{id}/drugera/{drugId}")
@Produces(MediaType.APPLICATION_JSON)
public CohortDrugEraDrilldown getDrugEraResults(@PathParam("id") final int id, @PathParam("drugId") final int drugId,
@QueryParam("min_covariate_person_count") final Integer minCovariatePersonCountParam,
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | true |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/CohortGenerationService.java | src/main/java/org/ohdsi/webapi/service/CohortGenerationService.java | package org.ohdsi.webapi.service;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.cohortcharacterization.CreateCohortTableTasklet;
import org.ohdsi.webapi.cohortcharacterization.DropCohortTableListener;
import org.ohdsi.webapi.cohortcharacterization.GenerateLocalCohortTasklet;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfo;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfoRepository;
import org.ohdsi.webapi.cohortdefinition.GenerateCohortTasklet;
import org.ohdsi.webapi.cohortdefinition.GenerationJobExecutionListener;
import org.ohdsi.webapi.common.generation.AutoremoveJobListener;
import org.ohdsi.webapi.common.generation.GenerationUtils;
import org.ohdsi.webapi.feanalysis.repository.FeAnalysisEntityRepository;
import org.ohdsi.webapi.generationcache.GenerationCacheHelper;
import org.ohdsi.webapi.job.GeneratesNotification;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.sqlrender.SourceAwareSqlRender;
import org.ohdsi.webapi.util.SessionUtils;
import org.ohdsi.webapi.util.SourceUtils;
import org.ohdsi.webapi.util.TempTableCleanupManager;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.batch.repeat.exception.ExceptionHandler;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.DependsOn;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.PostConstruct;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import java.util.Objects;
import static org.ohdsi.webapi.Constants.GENERATE_COHORT;
import static org.ohdsi.webapi.Constants.Params.COHORT_CHARACTERIZATION_ID;
import static org.ohdsi.webapi.Constants.Params.COHORT_DEFINITION_ID;
import static org.ohdsi.webapi.Constants.Params.GENERATE_STATS;
import static org.ohdsi.webapi.Constants.Params.JOB_NAME;
import static org.ohdsi.webapi.Constants.Params.SESSION_ID;
import static org.ohdsi.webapi.Constants.Params.SOURCE_ID;
import static org.ohdsi.webapi.Constants.Params.TARGET_DATABASE_SCHEMA;
import static org.ohdsi.webapi.Constants.Params.TARGET_TABLE;
import static org.ohdsi.webapi.Constants.Params.DEMOGRAPHIC_STATS;
@Component
@DependsOn("flyway")
public class CohortGenerationService extends AbstractDaoService implements GeneratesNotification {
// Persistence for cohort definitions and their per-source generation records.
private final CohortDefinitionRepository cohortDefinitionRepository;
private final CohortGenerationInfoRepository cohortGenerationInfoRepository;
// Spring Batch factories used to assemble the generation job and its steps.
private final JobBuilderFactory jobBuilders;
private final StepBuilderFactory stepBuilders;
private final JobService jobService;
private final SourceService sourceService;
private final GenerationCacheHelper generationCacheHelper;
private final FeAnalysisEntityRepository feAnalysisRepository;
private final SourceAwareSqlRender sourceAwareSqlRender;
private TransactionTemplate transactionTemplate;
// NOTE(review): duplicates 'stepBuilders' above (both StepBuilderFactory) —
// consider consolidating to a single field.
private StepBuilderFactory stepBuilderFactory;
/**
 * All collaborators are injected by Spring; see the field declarations for their
 * respective roles in cohort generation.
 */
@Autowired
public CohortGenerationService(CohortDefinitionRepository cohortDefinitionRepository,
CohortGenerationInfoRepository cohortGenerationInfoRepository,
JobBuilderFactory jobBuilders,
StepBuilderFactory stepBuilders,
JobService jobService,
SourceService sourceService,
GenerationCacheHelper generationCacheHelper,
FeAnalysisEntityRepository feAnalysisRepository,
TransactionTemplate transactionTemplate, StepBuilderFactory stepBuilderFactory,
SourceAwareSqlRender sourceAwareSqlRender) {
this.cohortDefinitionRepository = cohortDefinitionRepository;
this.cohortGenerationInfoRepository = cohortGenerationInfoRepository;
this.jobBuilders = jobBuilders;
this.stepBuilders = stepBuilders;
this.jobService = jobService;
this.sourceService = sourceService;
this.generationCacheHelper = generationCacheHelper;
this.feAnalysisRepository = feAnalysisRepository;
this.transactionTemplate = transactionTemplate;
this.stepBuilderFactory = stepBuilderFactory;
this.sourceAwareSqlRender = sourceAwareSqlRender;
}
/**
 * Creates (or reuses) the generation-info record for this cohort/source pair,
 * marks it PENDING with the current start time, persists the definition, and then
 * launches the Spring Batch generation job.
 *
 * @param userEntity the user initiating generation (recorded on the info record)
 * @param cohortDefinition the cohort definition to generate
 * @param source the CDM source to generate against
 * @param demographicStat whether demographic statistics should be generated
 * @return a resource describing the queued batch job execution
 */
public JobExecutionResource generateCohortViaJob(UserEntity userEntity, CohortDefinition cohortDefinition,
Source source, boolean demographicStat) {
// Reuse the existing per-source generation record if one exists; otherwise create one.
CohortGenerationInfo info = cohortDefinition.getGenerationInfoList().stream()
.filter(val -> Objects.equals(val.getId().getSourceId(), source.getSourceId())).findFirst()
.orElse(new CohortGenerationInfo(cohortDefinition, source.getSourceId()));
info.setCreatedBy(userEntity);
info.setIsDemographic(demographicStat);
cohortDefinition.getGenerationInfoList().add(info);
info.setStatus(GenerationStatus.PENDING)
.setStartTime(Calendar.getInstance().getTime());
cohortDefinitionRepository.save(cohortDefinition);
// the line below is essential to access the Cohort definition details in GenerateLocalCohortTasklet.generateCohort
// and avoid org.hibernate.LazyInitializationException:
// could not initialize proxy [org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetails#38] - no Session
// the workaround doesn't look pure in the same time refactoring doesn't look minor
// as a lot of components are instantiated by the new operator
cohortDefinition.getDetails().getExpression();
return runGenerateCohortJob(cohortDefinition, source, demographicStat);
}
private Job buildGenerateCohortJob(CohortDefinition cohortDefinition, Source source, JobParameters jobParameters) {
log.info("Beginning generate cohort for cohort definition id: {}", cohortDefinition.getId());
GenerateCohortTasklet generateTasklet = new GenerateCohortTasklet(
getSourceJdbcTemplate(source),
getTransactionTemplate(),
generationCacheHelper,
cohortDefinitionRepository,
sourceService, feAnalysisRepository
);
ExceptionHandler exceptionHandler = new GenerationTaskExceptionHandler(new TempTableCleanupManager(getSourceJdbcTemplate(source),
getTransactionTemplate(),
source.getSourceDialect(),
jobParameters.getString(SESSION_ID),
SourceUtils.getTempQualifierOrNull(source)
));
Step generateCohortStep = stepBuilders.get("cohortDefinition.generateCohort")
.tasklet(generateTasklet)
.exceptionHandler(exceptionHandler)
.build();
SimpleJobBuilder generateJobBuilder = jobBuilders.get(GENERATE_COHORT).start(generateCohortStep);
generateJobBuilder.listener(new GenerationJobExecutionListener(sourceService, cohortDefinitionRepository, this.getTransactionTemplateRequiresNew(),
this.getSourceJdbcTemplate(source)));
return generateJobBuilder.build();
}
public Job buildJobForCohortGenerationWithDemographic(
CohortDefinition cohortDefinition,
Source source,
JobParametersBuilder builder) {
JobParameters jobParameters = builder.toJobParameters();
addSessionParams(builder, jobParameters.getString(SESSION_ID));
CreateCohortTableTasklet createCohortTableTasklet = new CreateCohortTableTasklet(getSourceJdbcTemplate(source), transactionTemplate, sourceService, sourceAwareSqlRender);
Step createCohortTableStep = stepBuilderFactory.get(GENERATE_COHORT + ".createCohortTable")
.tasklet(createCohortTableTasklet)
.build();
log.info("Beginning generate cohort for cohort definition id: {}", cohortDefinition.getId());
GenerateLocalCohortTasklet generateLocalCohortTasklet = new GenerateLocalCohortTasklet(
transactionTemplate,
getSourceJdbcTemplate(source),
this,
sourceService,
chunkContext -> {
return Arrays.asList(cohortDefinition);
},
generationCacheHelper,
false
);
Step generateLocalCohortStep = stepBuilderFactory.get(GENERATE_COHORT + ".generateCohort")
.tasklet(generateLocalCohortTasklet)
.build();
GenerateCohortTasklet generateTasklet = new GenerateCohortTasklet(getSourceJdbcTemplate(source),
getTransactionTemplate(), generationCacheHelper, cohortDefinitionRepository, sourceService,
feAnalysisRepository);
ExceptionHandler exceptionHandler = new GenerationTaskExceptionHandler(new TempTableCleanupManager(
getSourceJdbcTemplate(source), getTransactionTemplate(), source.getSourceDialect(),
jobParameters.getString(SESSION_ID), SourceUtils.getTempQualifierOrNull(source)));
Step generateCohortStep = stepBuilders.get("cohortDefinition.generateCohort").tasklet(generateTasklet)
.exceptionHandler(exceptionHandler).build();
DropCohortTableListener dropCohortTableListener = new DropCohortTableListener(getSourceJdbcTemplate(source), transactionTemplate, sourceService, sourceAwareSqlRender);
SimpleJobBuilder generateJobBuilder = jobBuilders.get(GENERATE_COHORT)
.start(createCohortTableStep)
.next(generateLocalCohortStep)
.next(generateCohortStep)
.listener(dropCohortTableListener);
generateJobBuilder.listener(new GenerationJobExecutionListener(sourceService, cohortDefinitionRepository, this.getTransactionTemplateRequiresNew(),
this.getSourceJdbcTemplate(source)));
return generateJobBuilder.build();
}
  /**
   * Adds session-derived parameters to the job: the name of the session-scoped
   * temp cohort table used by the demographic generation flow.
   */
  protected void addSessionParams(JobParametersBuilder builder, String sessionId) {
    builder.addString(TARGET_TABLE, GenerationUtils.getTempCohortTableName(sessionId));
  }
private JobExecutionResource runGenerateCohortJob(CohortDefinition cohortDefinition, Source source,
Boolean demographic) {
final JobParametersBuilder jobParametersBuilder = getJobParametersBuilder(source, cohortDefinition);
if (demographic != null && demographic) {
jobParametersBuilder.addString(DEMOGRAPHIC_STATS, Boolean.TRUE.toString());
Job job = buildJobForCohortGenerationWithDemographic(cohortDefinition, source, jobParametersBuilder);
return jobService.runJob(job, jobParametersBuilder.toJobParameters());
} else {
Job job = buildGenerateCohortJob(cohortDefinition, source, jobParametersBuilder.toJobParameters());
return jobService.runJob(job, jobParametersBuilder.toJobParameters());
}
}
private JobParametersBuilder getJobParametersBuilder(Source source, CohortDefinition cohortDefinition) {
JobParametersBuilder builder = new JobParametersBuilder();
builder.addString(JOB_NAME, String.format("Generating cohort %d : %s (%s)", cohortDefinition.getId(), source.getSourceName(), source.getSourceKey()));
builder.addString(TARGET_DATABASE_SCHEMA, SourceUtils.getResultsQualifier(source));
builder.addString(SESSION_ID, SessionUtils.sessionId());
builder.addString(COHORT_DEFINITION_ID, String.valueOf(cohortDefinition.getId()));
builder.addString(SOURCE_ID, String.valueOf(source.getSourceId()));
builder.addString(GENERATE_STATS, Boolean.TRUE.toString());
return builder;
}
  /**
   * On startup, marks generations left in a transient status (see
   * INVALIDATE_STATUSES) as invalid so they are not reported as still running.
   */
  @PostConstruct
  public void init(){
    invalidateCohortGenerations();
  }
private void invalidateCohortGenerations() {
getTransactionTemplateRequiresNew().execute(status -> {
List<CohortGenerationInfo> executions = cohortGenerationInfoRepository.findByStatusIn(INVALIDATE_STATUSES);
invalidateExecutions(executions);
cohortGenerationInfoRepository.save(executions);
return null;
});
}
  /** @return the Spring Batch job name used for cohort generation. */
  @Override
  public String getJobName() {
    return GENERATE_COHORT;
  }
  /** @return the job-parameter key used as this service's execution folding key. */
  @Override
  public String getExecutionFoldingKey() {
    return COHORT_DEFINITION_ID;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/CohortSampleService.java | src/main/java/org/ohdsi/webapi/service/CohortSampleService.java | package org.ohdsi.webapi.service;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfo;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfoId;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfoRepository;
import org.ohdsi.webapi.cohortsample.CohortSamplingService;
import org.ohdsi.webapi.cohortsample.dto.CohortSampleDTO;
import org.ohdsi.webapi.cohortsample.dto.CohortSampleListDTO;
import org.ohdsi.webapi.cohortsample.dto.SampleParametersDTO;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.*;
@Path("/cohortsample")
@Component
@Produces(MediaType.APPLICATION_JSON)
public class CohortSampleService {

  private final CohortDefinitionRepository cohortDefinitionRepository;
  private final CohortGenerationInfoRepository generationInfoRepository;
  private final CohortSamplingService samplingService;
  private final SourceRepository sourceRepository;

  @Autowired
  public CohortSampleService(
      CohortSamplingService samplingService,
      SourceRepository sourceRepository,
      CohortDefinitionRepository cohortDefinitionRepository,
      CohortGenerationInfoRepository generationInfoRepository
  ) {
    this.samplingService = samplingService;
    this.sourceRepository = sourceRepository;
    this.cohortDefinitionRepository = cohortDefinitionRepository;
    this.generationInfoRepository = generationInfoRepository;
  }

  /**
   * Lists the samples taken from a cohort on a data source, along with the
   * cohort's generation status on that source.
   *
   * @param cohortDefinitionId id of an existing cohort definition
   * @param sourceKey key of the data source the samples were taken from
   * @return the samples plus generation status/validity for the pair
   */
  @Path("/{cohortDefinitionId}/{sourceKey}")
  @GET
  public CohortSampleListDTO listCohortSamples(
      @PathParam("cohortDefinitionId") int cohortDefinitionId,
      @PathParam("sourceKey") String sourceKey
  ) {
    Source source = getSource(sourceKey);
    CohortGenerationInfo generationInfo = findGenerationInfo(cohortDefinitionId, source);

    CohortSampleListDTO result = new CohortSampleListDTO();
    result.setCohortDefinitionId(cohortDefinitionId);
    result.setSourceId(source.getId());
    result.setGenerationStatus(generationInfo == null ? null : generationInfo.getStatus());
    result.setIsValid(generationInfo != null && generationInfo.isIsValid());
    result.setSamples(samplingService.listSamples(cohortDefinitionId, source.getId()));
    return result;
  }

  /**
   * Fetches an existing cohort sample by id.
   *
   * @param cohortDefinitionId id of the cohort the sample was drawn from
   * @param sourceKey key of the data source
   * @param sampleId id of the sample to return
   * @param fields comma-separated extra fields; "recordCount" adds record counts
   * @return the sample's elements (person id, gender, age, ...)
   */
  @Path("/{cohortDefinitionId}/{sourceKey}/{sampleId}")
  @GET
  public CohortSampleDTO getCohortSample(
      @PathParam("cohortDefinitionId") int cohortDefinitionId,
      @PathParam("sourceKey") String sourceKey,
      @PathParam("sampleId") Integer sampleId,
      @DefaultValue("") @QueryParam("fields") String fields
  ) {
    return samplingService.getSample(sampleId, includesRecordCounts(fields));
  }

  /**
   * Re-draws an existing cohort sample (re-samples persons from the cohort)
   * and returns the refreshed sample.
   *
   * @param cohortDefinitionId id of the cohort the sample was drawn from
   * @param sourceKey key of the data source
   * @param sampleId id of the sample to refresh
   * @param fields comma-separated extra fields; "recordCount" adds record counts
   * @return the refreshed sample
   */
  @Path("/{cohortDefinitionId}/{sourceKey}/{sampleId}/refresh")
  @POST
  public CohortSampleDTO refreshCohortSample(
      @PathParam("cohortDefinitionId") int cohortDefinitionId,
      @PathParam("sourceKey") String sourceKey,
      @PathParam("sampleId") Integer sampleId,
      @DefaultValue("") @QueryParam("fields") String fields
  ) {
    boolean withRecordCounts = includesRecordCounts(fields);
    samplingService.refreshSample(sampleId);
    return samplingService.getSample(sampleId, withRecordCounts);
  }

  /**
   * Reports whether a cohort has any samples on any source.
   *
   * @param cohortDefinitionId id of the cohort definition
   * @return {"hasSamples": true|false}
   */
  @Path("/has-samples/{cohortDefinitionId}")
  @GET
  public Map<String, Boolean> hasSamples(
      @PathParam("cohortDefinitionId") int cohortDefinitionId
  ) {
    return Collections.singletonMap("hasSamples", samplingService.countSamples(cohortDefinitionId) > 0);
  }

  /**
   * Reports whether a cohort has any samples on a particular source.
   *
   * @param sourceKey key of the data source
   * @param cohortDefinitionId id of the cohort definition
   * @return {"hasSamples": true|false}
   */
  @Path("/has-samples/{cohortDefinitionId}/{sourceKey}")
  @GET
  public Map<String, Boolean> hasSamples(
      @PathParam("sourceKey") String sourceKey,
      @PathParam("cohortDefinitionId") int cohortDefinitionId
  ) {
    Source source = getSource(sourceKey);
    return Collections.singletonMap("hasSamples",
        samplingService.countSamples(cohortDefinitionId, source.getId()) > 0);
  }

  /**
   * Creates a new sample of a generated cohort. The cohort must exist and its
   * generation on this source must be COMPLETE.
   *
   * @param sourceKey key of the data source to sample on
   * @param cohortDefinitionId id of the cohort definition to sample
   * @param sampleParameters sampling parameters (validated before use)
   * @return the newly created sample
   */
  @Path("/{cohortDefinitionId}/{sourceKey}")
  @POST
  @Consumes(MediaType.APPLICATION_JSON)
  public CohortSampleDTO createCohortSample(
      @PathParam("sourceKey") String sourceKey,
      @PathParam("cohortDefinitionId") int cohortDefinitionId,
      SampleParametersDTO sampleParameters
  ) {
    sampleParameters.validate();
    Source source = getSource(sourceKey);
    requireCohortDefinition(cohortDefinitionId);
    CohortGenerationInfo generationInfo = findGenerationInfo(cohortDefinitionId, source);
    if (generationInfo == null || generationInfo.getStatus() != GenerationStatus.COMPLETE) {
      throw new BadRequestException("Cohort is not yet generated");
    }
    return samplingService.createSample(source, cohortDefinitionId, sampleParameters);
  }

  /**
   * Deletes a single cohort sample.
   *
   * @param sourceKey key of the data source
   * @param cohortDefinitionId id of the cohort definition
   * @param sampleId id of the sample to delete
   * @return 204 No Content on success
   */
  @Path("/{cohortDefinitionId}/{sourceKey}/{sampleId}")
  @DELETE
  public Response deleteCohortSample(
      @PathParam("sourceKey") String sourceKey,
      @PathParam("cohortDefinitionId") int cohortDefinitionId,
      @PathParam("sampleId") int sampleId
  ) {
    Source source = getSource(sourceKey);
    requireCohortDefinition(cohortDefinitionId);
    samplingService.deleteSample(cohortDefinitionId, source, sampleId);
    return Response.status(Response.Status.NO_CONTENT).build();
  }

  /**
   * Asynchronously deletes all samples for a cohort on a data source.
   *
   * @param sourceKey key of the data source
   * @param cohortDefinitionId id of the cohort definition
   * @return 202 Accepted once the deletion tasklet is launched
   */
  @Path("/{cohortDefinitionId}/{sourceKey}")
  @DELETE
  public Response deleteCohortSamples(
      @PathParam("sourceKey") String sourceKey,
      @PathParam("cohortDefinitionId") int cohortDefinitionId
  ) {
    Source source = getSource(sourceKey);
    requireCohortDefinition(cohortDefinitionId);
    samplingService.launchDeleteSamplesTasklet(cohortDefinitionId, source.getId());
    return Response.status(Response.Status.ACCEPTED).build();
  }

  // True when the caller's comma-separated "fields" query param asks for record counts.
  private static boolean includesRecordCounts(String fields) {
    return Arrays.asList(fields.split(",")).contains("recordCount");
  }

  // Looks up the generation bookkeeping row for a cohort/source pair; may be null.
  private CohortGenerationInfo findGenerationInfo(int cohortDefinitionId, Source source) {
    return generationInfoRepository.findOne(new CohortGenerationInfoId(cohortDefinitionId, source.getId()));
  }

  // Fails with 404 unless the cohort definition exists.
  private void requireCohortDefinition(int cohortDefinitionId) {
    if (cohortDefinitionRepository.findOne(cohortDefinitionId) == null) {
      throw new NotFoundException("Cohort definition " + cohortDefinitionId + " does not exist.");
    }
  }

  // Resolves a source key to its Source entity, or fails with 404.
  private Source getSource(String sourceKey) {
    Source found = sourceRepository.findBySourceKey(sourceKey);
    if (found == null) {
      throw new NotFoundException("Source " + sourceKey + " does not exist");
    }
    return found;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/FeasibilityService.java | src/main/java/org/ohdsi/webapi/service/FeasibilityService.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.service;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.circe.cohortdefinition.CohortExpression;
import org.ohdsi.circe.cohortdefinition.CriteriaGroup;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.feasibility.InclusionRule;
import org.ohdsi.webapi.feasibility.FeasibilityStudy;
import org.ohdsi.webapi.feasibility.PerformFeasibilityTasklet;
import org.ohdsi.webapi.feasibility.StudyGenerationInfo;
import org.ohdsi.webapi.feasibility.FeasibilityReport;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.TerminateJobStepExceptionHandler;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetails;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfo;
import org.ohdsi.webapi.cohortdefinition.ExpressionType;
import org.ohdsi.webapi.feasibility.FeasibilityStudyRepository;
import org.ohdsi.webapi.cohortdefinition.GenerateCohortTasklet;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.generationcache.GenerationCacheHelper;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.job.JobTemplate;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.shiro.management.Security;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.util.CancelableJdbcTemplate;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.ohdsi.webapi.util.SessionUtils;
import org.ohdsi.webapi.util.UserUtils;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.DefaultTransactionDefinition;
/**
* REST Services related to performing a feasibility analysis but
* the implementation appears to be subsumed by the cohort definition
* services. Marking the REST methods of this
* class as deprecated.
*
* @summary Feasibility analysis (DO NOT USE)
*/
@Path("/feasibility/")
@Component
public class FeasibilityService extends AbstractDaoService {
  // Cohort-definition and study persistence.
  @Autowired
  private CohortDefinitionRepository cohortDefinitionRepository;
  @Autowired
  private FeasibilityStudyRepository feasibilityStudyRepository;
  // Spring Batch builders used to assemble the feasibility-generation job.
  @Autowired
  private JobBuilderFactory jobBuilders;
  @Autowired
  private StepBuilderFactory stepBuilders;
  @Autowired
  private JobTemplate jobTemplate;
  // Shiro security context and user lookup for created-by/modified-by auditing.
  @Autowired
  private Security security;
  @Autowired
  private UserRepository userRepository;
  // Shared Jackson mapper; copied before being reconfigured (see getMatchingCriteriaExpression).
  @Autowired
  private ObjectMapper objectMapper;
  @Autowired
  private GenerationCacheHelper generationCacheHelper;
  @Autowired
  private SourceService sourceService;
  @Context
  ServletContext context;
private StudyGenerationInfo findStudyGenerationInfoBySourceId(Collection<StudyGenerationInfo> infoList, Integer sourceId) {
for (StudyGenerationInfo info : infoList) {
if (info.getId().getSourceId() == sourceId) {
return info;
}
}
return null;
}
private CohortGenerationInfo findCohortGenerationInfoBySourceId(Collection<CohortGenerationInfo> infoList, Integer sourceId) {
for (CohortGenerationInfo info : infoList) {
if (info.getId().getSourceId() == sourceId) {
return info;
}
}
return null;
}
  /** Summary view of a feasibility study as returned by the list endpoint. */
  public static class FeasibilityStudyListItem {
    public Integer id;
    public String name;
    public String description;
    public String createdBy;
    // Cohort definition id of the study's index (entry) rule.
    public Integer indexCohortId;
    // Cohort definition id of the matching-population rule; null when the study has no inclusion rules.
    public Integer matchingCohortId;
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm")
    public Date createdDate;
    public String modifiedBy;
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm")
    public Date modifiedDate;
  }
  /** Full study payload: list-item fields plus the index rule expression and inclusion rules. */
  public static class FeasibilityStudyDTO extends FeasibilityStudyListItem {
    // JSON-serialized CohortExpression of the index rule.
    public String indexRule;
    public String indexDescription;
    public List<InclusionRule> inclusionRules;
  }
  /** Per-source study status: generation bookkeeping plus the match-rate summary. */
  public static class StudyInfoDTO {
    public StudyGenerationInfo generationInfo;
    public FeasibilityReport.Summary summary;
  }
private final RowMapper<FeasibilityReport.Summary> summaryMapper = new RowMapper<FeasibilityReport.Summary>() {
@Override
public FeasibilityReport.Summary mapRow(ResultSet rs, int rowNum) throws SQLException {
FeasibilityReport.Summary summary = new FeasibilityReport.Summary();
summary.totalPersons = rs.getLong("person_count");
summary.matchingPersons = rs.getLong("match_count");
double matchRatio = (summary.totalPersons > 0) ? ((double) summary.matchingPersons / (double) summary.totalPersons) : 0.0;
summary.percentMatched = new BigDecimal(matchRatio * 100.0).setScale(2, RoundingMode.HALF_UP).toPlainString() + "%";
return summary;
}
};
private FeasibilityReport.Summary getSimulationSummary(int id, Source source) {
String sql = "select person_count, match_count from @tableQualifier.feas_study_index_stats where study_id = @id";
String tqName = "tableQualifier";
String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, sql, tqName, tqValue, "id", whitelist(id), SessionUtils.sessionId());
return getSourceJdbcTemplate(source).queryForObject(psr.getSql(), psr.getOrderedParams(), summaryMapper);
}
private final RowMapper<FeasibilityReport.InclusionRuleStatistic> inclusionRuleStatisticMapper = new RowMapper<FeasibilityReport.InclusionRuleStatistic>() {
@Override
public FeasibilityReport.InclusionRuleStatistic mapRow(ResultSet rs, int rowNum) throws SQLException {
FeasibilityReport.InclusionRuleStatistic statistic = new FeasibilityReport.InclusionRuleStatistic();
statistic.id = rs.getInt("rule_sequence");
statistic.name = rs.getString("name");
statistic.countSatisfying = rs.getLong("person_count");
long personTotal = rs.getLong("person_total");
long gainCount = rs.getLong("gain_count");
double excludeRatio = personTotal > 0 ? (double) gainCount / (double) personTotal : 0.0;
String percentExcluded = new BigDecimal(excludeRatio * 100.0).setScale(2, RoundingMode.HALF_UP).toPlainString();
statistic.percentExcluded = percentExcluded + "%";
long satisfyCount = rs.getLong("person_count");
double satisfyRatio = personTotal > 0 ? (double) satisfyCount / (double) personTotal : 0.0;
String percentSatisfying = new BigDecimal(satisfyRatio * 100.0).setScale(2, RoundingMode.HALF_UP).toPlainString();
statistic.percentSatisfying = percentSatisfying + "%";
return statistic;
}
};
  /**
   * Builds the JSON expression for the study's "matching" cohort: the index
   * rule's cohort expression with every inclusion rule folded into its
   * additionalCriteria as an AND-ed ("ALL") criteria group.
   *
   * @param p study whose index rule and inclusion rules are combined
   * @return JSON-serialized CohortExpression containing all criteria
   * @throws RuntimeException if the study has no inclusion rules or JSON (de)serialization fails
   */
  private String getMatchingCriteriaExpression(FeasibilityStudy p) {
    if (p.getInclusionRules().size() == 0) {
      throw new RuntimeException("Study must have at least 1 inclusion rule");
    }
    try {
      // all resultRule repository objects are initalized; create 'all criteria' cohort definition from index rule + inclusion rules
      ObjectMapper mapper = objectMapper.copy().setSerializationInclusion(JsonInclude.Include.NON_NULL);
      CohortExpression indexRuleExpression = mapper.readValue(p.getIndexRule().getDetails().getExpression(), CohortExpression.class);
      if (indexRuleExpression.additionalCriteria == null) {
        // No pre-existing criteria: start a fresh ALL group to hold the inclusion rules.
        CriteriaGroup additionalCriteria = new CriteriaGroup();
        additionalCriteria.type = "ALL";
        indexRuleExpression.additionalCriteria = additionalCriteria;
      } else {
        if (!"ALL".equalsIgnoreCase(indexRuleExpression.additionalCriteria.type)) {
          // move this CriteriaGroup inside a new parent CriteriaGroup where the parent CriteriaGroup.type == "ALL"
          // (preserves the original non-ALL semantics while still AND-ing the inclusion rules)
          CriteriaGroup parentGroup = new CriteriaGroup();
          parentGroup.type = "ALL";
          parentGroup.groups = new CriteriaGroup[1];
          parentGroup.groups[0] = indexRuleExpression.additionalCriteria;
          indexRuleExpression.additionalCriteria = parentGroup;
        }
      }
      // place each inclusion rule (which is a CriteriaGroup) in the indexRuleExpression.additionalCriteria.group array to create the 'allCriteriaExpression'
      ArrayList<CriteriaGroup> additionalCriteriaGroups = new ArrayList<>();
      if (indexRuleExpression.additionalCriteria.groups != null) {
        additionalCriteriaGroups.addAll(Arrays.asList(indexRuleExpression.additionalCriteria.groups));
      }
      for (InclusionRule inclusionRule : p.getInclusionRules()) {
        String inclusionRuleJSON = inclusionRule.getExpression();
        CriteriaGroup inclusionRuleGroup = mapper.readValue(inclusionRuleJSON, CriteriaGroup.class);
        additionalCriteriaGroups.add(inclusionRuleGroup);
      }
      // overwrite indexRule additional criteria groups with the new list of groups with inclusion rules
      indexRuleExpression.additionalCriteria.groups = additionalCriteriaGroups.toArray(new CriteriaGroup[0]);
      String allCriteriaExpression = mapper.writeValueAsString(indexRuleExpression); // index rule expression now contains all inclusion criteria as additional criteria
      return allCriteriaExpression;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
private List<FeasibilityReport.InclusionRuleStatistic> getSimulationInclusionRuleStatistics(int id, Source source) {
String sql = "select rule_sequence, name, person_count, gain_count, person_total from @tableQualifier.feas_study_inclusion_stats where study_id = @id ORDER BY rule_sequence";
String tqName = "tableQualifier";
String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, sql, tqName, tqValue, "id", id, SessionUtils.sessionId());
return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), inclusionRuleStatisticMapper);
}
private int countSetBits(long n) {
int count = 0;
while (n > 0) {
n &= (n - 1);
count++;
}
return count;
}
private String formatBitMask(Long n, int size) {
return StringUtils.reverse(StringUtils.leftPad(Long.toBinaryString(n), size, "0"));
}
private final RowMapper<Long[]> simulationResultItemMapper = new RowMapper<Long[]>() {
@Override
public Long[] mapRow(ResultSet rs, int rowNum) throws SQLException {
Long[] resultItem = new Long[2];
resultItem[0] = rs.getLong("inclusion_rule_mask");
resultItem[1] = rs.getLong("person_count");
return resultItem;
}
};
  /**
   * Builds a nested treemap JSON string of simulation results: top-level
   * children are groups keyed by how many inclusion rules matched (most
   * matches first); leaves are individual bitmask patterns with person counts.
   *
   * @param id study id
   * @param inclusionRuleCount number of inclusion rules (pads the bitmask strings)
   * @param source data source to query
   * @return treemap JSON of the form {"name":"Everyone","children":[...]}
   */
  private String getInclusionRuleTreemapData(int id, int inclusionRuleCount, Source source) {
    String sql = "select inclusion_rule_mask, person_count from @tableQualifier.feas_study_result where study_id = @id";
    String tqName = "tableQualifier";
    String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    PreparedStatementRenderer psr = new PreparedStatementRenderer(source, sql, tqName, tqValue, "id", id, SessionUtils.sessionId());
    // [0] is the inclusion rule bitmask, [1] is the count of the match
    List<Long[]> items = this.getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), simulationResultItemMapper);
    // Bucket result rows by the number of inclusion rules satisfied (set bits in the mask).
    Map<Integer, List<Long[]>> groups = new HashMap<>();
    for (Long[] item : items) {
      int bitsSet = countSetBits(item[0]);
      if (!groups.containsKey(bitsSet)) {
        groups.put(bitsSet, new ArrayList<Long[]>());
      }
      groups.get(bitsSet).add(item);
    }
    StringBuilder treemapData = new StringBuilder("{\"name\" : \"Everyone\", \"children\" : [");
    List<Integer> groupKeys = new ArrayList<Integer>(groups.keySet());
    Collections.sort(groupKeys);
    Collections.reverse(groupKeys);
    int groupCount = 0;
    // create a nested treemap data where more matches (more bits set in string) appear higher in the hierarchy)
    for (Integer groupKey : groupKeys) {
      if (groupCount > 0) {
        treemapData.append(",");
      }
      treemapData.append(String.format("{\"name\" : \"Group %d\", \"children\" : [", groupKey));
      int groupItemCount = 0;
      for (Long[] groupItem : groups.get(groupKey)) {
        if (groupItemCount > 0) {
          treemapData.append(",");
        }
        //sb_treemap.Append("{\"name\": \"" + cohort_identifer + "\", \"size\": " + cohorts[cohort_identifer].ToString() + "}");
        treemapData.append(String.format("{\"name\": \"%s\", \"size\": %d}", formatBitMask(groupItem[0], inclusionRuleCount), groupItem[1]));
        groupItemCount++;
      }
      groupCount++;
    }
    // Each opened group contributes one "]}"; the +1 closes the root object's array and brace.
    treemapData.append(StringUtils.repeat("]}", groupCount + 1));
    return treemapData.toString();
  }
public FeasibilityStudyDTO feasibilityStudyToDTO(FeasibilityStudy study) {
FeasibilityStudyDTO pDTO = new FeasibilityStudyDTO();
pDTO.id = study.getId();
pDTO.name = study.getName();
pDTO.description = study.getDescription();
pDTO.indexCohortId = study.getIndexRule().getId();
pDTO.matchingCohortId = study.getResultRule() != null ? study.getResultRule().getId() : null;
pDTO.createdBy = UserUtils.nullSafeLogin(study.getCreatedBy());
pDTO.createdDate = study.getCreatedDate();
pDTO.modifiedBy = UserUtils.nullSafeLogin(study.getModifiedBy());
pDTO.modifiedDate = study.getModifiedDate();
pDTO.indexRule = study.getIndexRule().getDetails().getExpression();
pDTO.indexDescription = study.getIndexRule().getDescription();
pDTO.inclusionRules = study.getInclusionRules();
return pDTO;
}
/**
* DO NOT USE
*
* @summary DO NOT USE
* @deprecated
* @return List<FeasibilityService.FeasibilityStudyListItem>
*/
@GET
@Path("/")
@Produces(MediaType.APPLICATION_JSON)
public List<FeasibilityService.FeasibilityStudyListItem> getFeasibilityStudyList() {
return getTransactionTemplate().execute(transactionStatus -> {
Iterable<FeasibilityStudy> studies = this.feasibilityStudyRepository.findAll();
return StreamSupport.stream(studies.spliterator(), false).map(p -> {
FeasibilityService.FeasibilityStudyListItem item = new FeasibilityService.FeasibilityStudyListItem();
item.id = p.getId();
item.name = p.getName();
item.description = p.getDescription();
item.indexCohortId = p.getIndexRule().getId();
item.matchingCohortId = p.getResultRule() != null ? p.getResultRule().getId() : null;
item.createdBy = UserUtils.nullSafeLogin(p.getCreatedBy());
item.createdDate = p.getCreatedDate();
item.modifiedBy = UserUtils.nullSafeLogin(p.getModifiedBy());
item.modifiedDate = p.getModifiedDate();
return item;
}).collect(Collectors.toList());
});
}
  /**
   * Creates a feasibility study together with its index cohort definition and,
   * when inclusion rules are supplied, a "matching population" cohort definition
   * whose expression folds all inclusion rules into the index rule.
   *
   * @summary DO NOT USE
   * @deprecated
   * @param study The feasibility study
   * @return Feasibility study
   */
  @PUT
  @Path("/")
  @Produces(MediaType.APPLICATION_JSON)
  @Consumes(MediaType.APPLICATION_JSON)
  @Transactional
  public FeasibilityService.FeasibilityStudyDTO createStudy(FeasibilityService.FeasibilityStudyDTO study) {
    return getTransactionTemplate().execute(transactionStatus -> {
      Date currentTime = Calendar.getInstance().getTime();
      UserEntity user = userRepository.findByLogin(security.getSubject());
      //create definition in 2 saves, first to get the generated ID for the new cohort definition (the index rule)
      // then to associate the new definition with the index rule of the study
      // NOTE(review): only a single repository save is visible below — the comment above may be stale.
      FeasibilityStudy newStudy = new FeasibilityStudy();
      newStudy.setName(study.name)
              .setDescription(study.description)
              .setCreatedBy(user)
              .setCreatedDate(currentTime)
              .setInclusionRules(new ArrayList<InclusionRule>(study.inclusionRules));
      // create index cohort
      CohortDefinition indexRule = new CohortDefinition()
              .setName("Index Population for Study: " + newStudy.getName())
              .setDescription(study.indexDescription);
      indexRule.setCreatedBy(user);
      indexRule.setCreatedDate(currentTime);
      indexRule.setExpressionType(ExpressionType.SIMPLE_EXPRESSION);
      CohortDefinitionDetails indexDetails = new CohortDefinitionDetails();
      indexDetails.setCohortDefinition(indexRule)
              .setExpression(study.indexRule);
      indexRule.setDetails(indexDetails);
      newStudy.setIndexRule(indexRule);
      // build matching cohort from inclusion rules if inclusion rules exist
      if (newStudy.getInclusionRules().size() > 0) {
        CohortDefinition resultDef = new CohortDefinition()
                .setName("Matching Population for Study: " + newStudy.getName())
                .setDescription(newStudy.getDescription());
        resultDef.setCreatedBy(user);
        resultDef.setCreatedDate(currentTime);
        resultDef.setExpressionType(ExpressionType.SIMPLE_EXPRESSION);
        CohortDefinitionDetails resultDetails = new CohortDefinitionDetails();
        resultDetails.setCohortDefinition(resultDef)
                .setExpression(getMatchingCriteriaExpression(newStudy));
        resultDef.setDetails(resultDetails);
        newStudy.setResultRule(resultDef);
      }
      FeasibilityStudy createdStudy = this.feasibilityStudyRepository.save(newStudy);
      return feasibilityStudyToDTO(createdStudy);
    });
  }
/**
 * Fetches a single feasibility study (with its detail graph) and maps it to a DTO.
 *
 * @summary DO NOT USE
 * @deprecated
 * @param id The study ID
 * @return Feasibility study
 */
@GET
@Path("/{id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Transactional(readOnly = true)
public FeasibilityService.FeasibilityStudyDTO getStudy(@PathParam("id") final int id) {
  // Load and convert inside the transaction so lazy associations can be resolved.
  return getTransactionTemplate().execute(
      status -> feasibilityStudyToDTO(this.feasibilityStudyRepository.findOneWithDetail(id)));
}
/**
 * Updates a feasibility study in place: study metadata, the index cohort's name /
 * description / expression, and the derived "matching" cohort — which is created,
 * refreshed, or deleted depending on whether inclusion rules remain.
 *
 * @summary DO NOT USE
 * @deprecated
 * @param id The study ID
 * @param study The study information
 * @return The updated study information
 */
@PUT
@Path("/{id}")
@Produces(MediaType.APPLICATION_JSON)
@Transactional
public FeasibilityService.FeasibilityStudyDTO saveStudy(@PathParam("id") final int id, FeasibilityStudyDTO study) {
  Date currentTime = Calendar.getInstance().getTime();
  UserEntity user = userRepository.findByLogin(security.getSubject());
  FeasibilityStudy updatedStudy = this.feasibilityStudyRepository.findOne(id);
  updatedStudy.setName(study.name)
    .setDescription(study.description)
    .setModifiedBy(user)
    .setModifiedDate(currentTime)
    .setInclusionRules(study.inclusionRules);
  // refresh the index cohort's derived name and its expression from the request
  updatedStudy.getIndexRule()
    .setName("Index Population for Study: " + updatedStudy.getName())
    .setDescription(study.indexDescription)
    .getDetails().setExpression(study.indexRule);
  updatedStudy.getIndexRule().setModifiedBy(user);
  updatedStudy.getIndexRule().setModifiedDate(currentTime);
  CohortDefinition resultRule = updatedStudy.getResultRule();
  if (updatedStudy.getInclusionRules().size() > 0) {
    // lazily create the matching cohort the first time inclusion rules appear
    if (resultRule == null) {
      resultRule = new CohortDefinition();
      resultRule.setName("Matching Population for Study: " + updatedStudy.getName())
        .setExpressionType(ExpressionType.SIMPLE_EXPRESSION);
      resultRule.setCreatedBy(user);
      resultRule.setCreatedDate(currentTime);
      CohortDefinitionDetails resultDetails = new CohortDefinitionDetails();
      resultDetails.setCohortDefinition(resultRule);
      resultRule.setDetails(resultDetails);
      updatedStudy.setResultRule(resultRule);
    }
    // rebuild the matching expression from the (possibly changed) inclusion rules
    resultRule.setName("Matching Population for Study: " + updatedStudy.getName())
      .setDescription(updatedStudy.getDescription())
      .getDetails().setExpression(getMatchingCriteriaExpression(updatedStudy));
    resultRule.setModifiedBy(user);
    resultRule.setModifiedDate(currentTime);
  } else {
    // no inclusion rules left: detach the matching cohort before deleting it
    updatedStudy.setResultRule(null);
    if (resultRule != null) {
      cohortDefinitionRepository.delete(resultRule);
    }
  }
  this.feasibilityStudyRepository.save(updatedStudy);
  return getStudy(id);
}
/**
 * Kicks off generation of a feasibility study against a source: marks the index
 * cohort, the (optional) matching cohort, and the study itself PENDING inside a
 * dedicated REQUIRES_NEW transaction (committed before launch so status is visible
 * to pollers), then launches a two-step batch job (generate index cohort, then
 * perform the study simulation).
 *
 * @summary DO NOT USE
 * @deprecated
 * @param study_id The study ID
 * @param sourceKey The source key
 * @return JobExecutionResource
 */
@GET
@Path("/{study_id}/generate/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource performStudy(@PathParam("study_id") final int study_id, @PathParam("sourceKey") final String sourceKey) {
  Date startTime = Calendar.getInstance().getTime();
  Source source = this.getSourceRepository().findBySourceKey(sourceKey);
  String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
  String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);
  // Open a REQUIRES_NEW transaction so the PENDING status commits immediately,
  // independent of the (long-running) job launched below.
  DefaultTransactionDefinition requresNewTx = new DefaultTransactionDefinition();
  requresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
  TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager().getTransaction(requresNewTx);
  FeasibilityStudy study = this.feasibilityStudyRepository.findOne(study_id);
  CohortDefinition indexRule = this.cohortDefinitionRepository.findOne(study.getIndexRule().getId());
  // Create-or-reuse the per-source generation info rows, then flag them PENDING.
  CohortGenerationInfo indexInfo = findCohortGenerationInfoBySourceId(indexRule.getGenerationInfoList(), source.getSourceId());
  if (indexInfo == null) {
    indexInfo = new CohortGenerationInfo(indexRule, source.getSourceId());
    indexRule.getGenerationInfoList().add(indexInfo);
  }
  indexInfo.setStatus(GenerationStatus.PENDING)
    .setStartTime(startTime)
    .setExecutionDuration(null);
  this.cohortDefinitionRepository.save(indexRule);
  if (study.getResultRule() != null)
  {
    CohortDefinition resultRule = this.cohortDefinitionRepository.findOne(study.getResultRule().getId());
    CohortGenerationInfo resultInfo = findCohortGenerationInfoBySourceId(resultRule.getGenerationInfoList(), source.getSourceId());
    if (resultInfo == null) {
      resultInfo = new CohortGenerationInfo(resultRule, source.getSourceId());
      resultRule.getGenerationInfoList().add(resultInfo);
    }
    resultInfo.setStatus(GenerationStatus.PENDING)
      .setStartTime(startTime)
      .setExecutionDuration(null);
    this.cohortDefinitionRepository.save(resultRule);
  }
  StudyGenerationInfo studyInfo = findStudyGenerationInfoBySourceId(study.getStudyGenerationInfoList(), source.getSourceId());
  if (studyInfo == null) {
    studyInfo = new StudyGenerationInfo(study, source);
    study.getStudyGenerationInfoList().add(studyInfo);
  }
  studyInfo.setStatus(GenerationStatus.PENDING)
    .setStartTime(startTime)
    .setExecutionDuration(null);
  this.feasibilityStudyRepository.save(study);
  // Commit the status updates before the job starts.
  this.getTransactionTemplate().getTransactionManager().commit(initStatus);
  // Assemble the batch job parameters (all values passed as strings).
  JobParametersBuilder builder = new JobParametersBuilder();
  builder.addString("jobName", "performing feasibility study on " + indexRule.getName() + " : " + source.getSourceName() + " (" + source.getSourceKey() + ")");
  builder.addString("cdm_database_schema", cdmTableQualifier);
  builder.addString("results_database_schema", resultsTableQualifier);
  builder.addString("target_database_schema", resultsTableQualifier);
  builder.addString("target_dialect", source.getSourceDialect());
  builder.addString("target_table", "cohort");
  builder.addString("cohort_definition_id", ("" + indexRule.getId()));
  builder.addString("study_id", ("" + study_id));
  builder.addString("source_id", ("" + source.getSourceId()));
  builder.addString("generate_stats", Boolean.TRUE.toString());
  final JobParameters jobParameters = builder.toJobParameters();
  final CancelableJdbcTemplate sourceJdbcTemplate = getSourceJdbcTemplate(source);
  // Step 1: generate the index cohort; a failure terminates the whole job.
  GenerateCohortTasklet indexRuleTasklet = new GenerateCohortTasklet(
      sourceJdbcTemplate,
      getTransactionTemplate(),
      generationCacheHelper,
      cohortDefinitionRepository,
      sourceService
  );
  Step generateCohortStep = stepBuilders.get("performStudy.generateIndexCohort")
    .tasklet(indexRuleTasklet)
    .exceptionHandler(new TerminateJobStepExceptionHandler())
    .build();
  // Step 2: run the feasibility simulation against the generated cohort.
  PerformFeasibilityTasklet simulateTasket = new PerformFeasibilityTasklet(sourceJdbcTemplate, getTransactionTemplate(), feasibilityStudyRepository, objectMapper);
  Step performStudyStep = stepBuilders.get("performStudy.performStudy")
    .tasklet(simulateTasket)
    .build();
  Job performStudyJob = jobBuilders.get("performStudy")
    .start(generateCohortStep)
    .next(performStudyStep)
    .build();
  JobExecutionResource jobExec = this.jobTemplate.launch(performStudyJob, jobParameters);
  return jobExec;
}
/**
 * Lists generation information for every source the study has been run against,
 * pairing each generation record with its result summary.
 *
 * @summary DO NOT USE
 * @deprecated
 * @param id The study ID
 * @return List&lt;StudyInfoDTO&gt;
 */
@GET
@Path("/{id}/info")
@Produces(MediaType.APPLICATION_JSON)
@Transactional(readOnly = true)
public List<StudyInfoDTO> getSimulationInfo(@PathParam("id") final int id) {
  FeasibilityStudy study = this.feasibilityStudyRepository.findOne(id);
  return study.getStudyGenerationInfoList().stream()
      .map(generationInfo -> {
        StudyInfoDTO dto = new StudyInfoDTO();
        dto.generationInfo = generationInfo;
        dto.summary = getSimulationSummary(id, generationInfo.getSource());
        return dto;
      })
      .collect(Collectors.toList());
}
/**
 * Builds the full simulation report for one study/source pair: overall summary,
 * per-inclusion-rule statistics, and the treemap visualization data.
 *
 * @summary DO NOT USE
 * @deprecated
 * @param id The study ID
 * @param sourceKey The source key
 * @return FeasibilityReport
 */
@GET
@Path("/{id}/report/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Transactional
public FeasibilityReport getSimulationReport(@PathParam("id") final int id, @PathParam("sourceKey") final String sourceKey) {
  Source source = this.getSourceRepository().findBySourceKey(sourceKey);
  FeasibilityReport report = new FeasibilityReport();
  // id is passed through whitelist() before each query as an injection guard
  report.summary = getSimulationSummary(whitelist(id), source);
  report.inclusionRuleStats = getSimulationInclusionRuleStatistics(whitelist(id), source);
  report.treemapData = getInclusionRuleTreemapData(whitelist(id), report.inclusionRuleStats.size(), source);
  return report;
}
/**
 * Duplicates an existing feasibility study under a "copy of" name; the clone is
 * persisted via {@link #createStudy} and gets a fresh identifier.
 *
 * @summary DO NOT USE
 * @deprecated
 * @param id - the Cohort Definition ID to copy
 * @return the copied feasibility study as a FeasibilityStudyDTO
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/copy")
@javax.transaction.Transactional
public FeasibilityStudyDTO copy(@PathParam("id") final int id) {
  FeasibilityStudyDTO template = getStudy(id);
  // drop the id so persistence assigns a new one, and re-label the copy
  template.id = null;
  template.name = String.format(Constants.Templates.ENTITY_COPY_PREFIX, template.name);
  return createStudy(template);
}
/**
 * Removes the feasibility study with the given identifier.
 *
 * @summary DO NOT USE
 * @deprecated
 * @param id The study ID
 */
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}")
public void delete(@PathParam("id") final int id) {
  // Straight delete-by-id through the repository.
  feasibilityStudyRepository.delete(id);
}
/**
 * Removes a study's generation info for one source, leaving info for other
 * sources untouched. A no-op when no entry matches the source key.
 *
 * @summary DO NOT USE
 * @deprecated
 * @param id The study ID
 * @param sourceKey The source key
 */
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/info/{sourceKey}")
@Transactional
public void deleteInfo(@PathParam("id") final int id, @PathParam("sourceKey") final String sourceKey) {
  FeasibilityStudy study = feasibilityStudyRepository.findOne(id);
  // Scan for the entry belonging to the requested source (last match wins).
  StudyGenerationInfo match = null;
  for (StudyGenerationInfo candidate : study.getStudyGenerationInfoList()) {
    if (candidate.getSource().getSourceKey().equals(sourceKey)) {
      match = candidate;
    }
  }
  if (match != null) {
    study.getStudyGenerationInfoList().remove(match);
  }
  feasibilityStudyRepository.save(study);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/GenerationTaskExceptionHandler.java | src/main/java/org/ohdsi/webapi/service/GenerationTaskExceptionHandler.java | package org.ohdsi.webapi.service;
import org.ohdsi.webapi.util.TempTableCleanupManager;
import org.springframework.batch.repeat.RepeatContext;
import org.springframework.batch.repeat.exception.ExceptionHandler;
/**
 * Spring Batch {@link ExceptionHandler} that drops the generation run's temp
 * tables before re-throwing the original failure, so a failed step does not
 * leave temporary-table debris behind on the source database.
 */
public class GenerationTaskExceptionHandler implements ExceptionHandler {

  /** Performs the temp-table cleanup; immutable after construction. */
  private final TempTableCleanupManager cleanupManager;

  /**
   * @param cleanupManager manager that knows which temp tables the step created
   */
  public GenerationTaskExceptionHandler(TempTableCleanupManager cleanupManager) {
    this.cleanupManager = cleanupManager;
  }

  /**
   * Cleans up temp tables, then rethrows {@code throwable} unchanged so the job
   * still fails with the original cause.
   *
   * @throws Throwable always — the incoming {@code throwable} is propagated
   */
  @Override
  public void handleException(RepeatContext context, Throwable throwable) throws Throwable {
    cleanupManager.cleanupTempTables();
    throw throwable;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/EvidenceService.java | src/main/java/org/ohdsi/webapi/service/EvidenceService.java | package org.ohdsi.webapi.service;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.ArrayList;
import java.util.List;
import java.io.IOException;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.stream.Collectors;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.circe.vocabulary.ConceptSetExpression;
import org.ohdsi.circe.vocabulary.ConceptSetExpressionQueryBuilder;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.conceptset.ConceptSetGenerationInfoRepository;
import org.ohdsi.webapi.evidence.CohortStudyMapping;
import org.ohdsi.webapi.evidence.CohortStudyMappingRepository;
import org.ohdsi.webapi.evidence.ConceptCohortMapping;
import org.ohdsi.webapi.evidence.ConceptCohortMappingRepository;
import org.ohdsi.webapi.evidence.ConceptOfInterestMapping;
import org.ohdsi.webapi.evidence.ConceptOfInterestMappingRepository;
import org.ohdsi.webapi.evidence.DrugEvidence;
import org.ohdsi.webapi.evidence.EvidenceDetails;
import org.ohdsi.webapi.evidence.EvidenceSummary;
import org.ohdsi.webapi.evidence.EvidenceUniverse;
import org.ohdsi.webapi.evidence.HoiEvidence;
import org.ohdsi.webapi.evidence.DrugHoiEvidence;
import org.ohdsi.webapi.evidence.DrugLabel;
import org.ohdsi.webapi.evidence.DrugLabelInfo;
import org.ohdsi.webapi.evidence.DrugLabelRepository;
import org.ohdsi.webapi.evidence.EvidenceInfo;
import org.ohdsi.webapi.evidence.DrugRollUpEvidence;
import org.ohdsi.webapi.evidence.Evidence;
import org.ohdsi.webapi.evidence.SpontaneousReport;
import org.ohdsi.webapi.evidence.EvidenceSearch;
import org.ohdsi.webapi.evidence.negativecontrols.NegativeControlDTO;
import org.ohdsi.webapi.evidence.negativecontrols.NegativeControlMapper;
import org.ohdsi.webapi.evidence.negativecontrols.NegativeControlTaskParameters;
import org.ohdsi.webapi.evidence.negativecontrols.NegativeControlTasklet;
import org.ohdsi.webapi.job.GeneratesNotification;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.job.JobTemplate;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.util.PreparedSqlRender;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
/**
* Provides REST services for querying the Common Evidence Model
*
* @summary REST services for querying the Common Evidence Model See
* <a href="https://github.com/OHDSI/CommonEvidenceModel">https://github.com/OHDSI/CommonEvidenceModel</a>
*/
@Path("/evidence")
@Component
public class EvidenceService extends AbstractDaoService implements GeneratesNotification {
private static final String NAME = "negativeControlsAnalysisJob";
@Autowired
private JobTemplate jobTemplate;
@Autowired
private DrugLabelRepository drugLabelRepository;
@Autowired
private ConceptCohortMappingRepository mappingRepository;
@Autowired
private ConceptOfInterestMappingRepository conceptOfInterestMappingRepository;
@Autowired
private CohortStudyMappingRepository cohortStudyMappingRepository;
@Autowired
private ConceptSetGenerationInfoRepository conceptSetGenerationInfoRepository;
@Autowired
private ConceptSetService conceptSetService;
// Maps one row of the drug-label lookup onto a DrugLabelInfo DTO.
// RowMapper is a functional interface, so the anonymous-class form is replaced
// with the equivalent (and more idiomatic) lambda; behavior is unchanged.
private final RowMapper<DrugLabelInfo> drugLabelRowMapper = (rs, rowNum) -> {
  final DrugLabelInfo returnVal = new DrugLabelInfo();
  returnVal.conceptId = rs.getString("CONCEPT_ID");
  returnVal.conceptName = rs.getString("CONCEPT_NAME");
  returnVal.usaProductLabelExists = rs.getInt("US_SPL_LABEL");
  return returnVal;
};
/**
 * Request body for drug/condition evidence searches: the concept ids to match
 * and the evidence sources the search is restricted to.
 */
public static class DrugConditionSourceSearchParams {

  @JsonProperty("targetDomain")
  public String targetDomain = "CONDITION";

  @JsonProperty("drugConceptIds")
  public int[] drugConceptIds;

  @JsonProperty("conditionConceptIds")
  public int[] conditionConceptIds;

  @JsonProperty("sourceIds")
  public String[] sourceIds;

  /** Drug concept ids as a comma-separated string (null passes through as null). */
  public String getDrugConceptIds() {
    return StringUtils.join(drugConceptIds, ',');
  }

  /** Condition concept ids as a comma-separated string. */
  public String getConditionConceptIds() {
    return StringUtils.join(conditionConceptIds, ',');
  }

  /**
   * Source ids rendered as a single-quoted, comma-separated SQL list. Quote
   * characters are stripped from each id before quoting (blunts quote-based
   * injection); yields {@code ''} when no sources were supplied.
   */
  public String getSourceIds() {
    if (sourceIds == null) {
      return "''";
    }
    List<String> cleaned = new ArrayList<>();
    for (String sourceId : sourceIds) {
      cleaned.add(sourceId.replaceAll("(\"|')", ""));
    }
    return "'" + StringUtils.join(cleaned, "','") + "'";
  }
}
/**
 * <a href="https://github.com/OHDSI/Penelope">PENELOPE</a> function: lists the
 * study mappings recorded for a cohort in the WebAPI cohort_study table.
 *
 * @summary Find studies for a cohort - will be depreciated
 * @deprecated
 * @param cohortId The cohort Id
 * @return A list of studies related to the cohort
 */
@GET
@Path("study/{cohortId}")
@Produces(MediaType.APPLICATION_JSON)
public Collection<CohortStudyMapping> getCohortStudyMapping(@PathParam("cohortId") int cohortId) {
  // Plain repository lookup against the WebAPI database.
  return cohortStudyMappingRepository.findByCohortDefinitionId(cohortId);
}
/**
 * <a href="https://github.com/OHDSI/Penelope">PENELOPE</a> function: lists the
 * cohorts mapped to a concept in the WebAPI COHORT_CONCEPT_MAP table.
 *
 * @summary Find cohorts for a concept - will be depreciated
 * @deprecated
 * @param conceptId The concept Id of interest
 * @return A list of cohorts for the specified conceptId
 */
@GET
@Path("mapping/{conceptId}")
@Produces(MediaType.APPLICATION_JSON)
public Collection<ConceptCohortMapping> getConceptCohortMapping(@PathParam("conceptId") int conceptId) {
  // Plain repository lookup against the WebAPI database.
  return mappingRepository.findByConceptId(conceptId);
}
/**
 * <a href="https://github.com/OHDSI/Penelope">PENELOPE</a> function: reads the
 * manually curated concept_of_interest mapping table in WebAPI. Scheduled for
 * removal in a future release.
 *
 * @summary Find a custom concept mapping - will be depreciated
 * @deprecated
 * @param conceptId The conceptId of interest
 * @return A list of concepts based on the conceptId of interest
 */
@GET
@Path("conceptofinterest/{conceptId}")
@Produces(MediaType.APPLICATION_JSON)
public Collection<ConceptOfInterestMapping> getConceptOfInterest(@PathParam("conceptId") int conceptId) {
  // Plain repository lookup against the curated mapping table.
  return conceptOfInterestMappingRepository.findAllByConceptId(conceptId);
}
/**
 * <a href="https://github.com/OHDSI/Penelope">PENELOPE</a> function: returns the
 * WebAPI DRUG_LABELS rows for a product label SET_ID (associating the label with
 * its RxNorm ingredient). Scheduled for removal — this information is available
 * in the OMOP vocabulary.
 *
 * @summary Find a drug label - will be depreciated
 * @deprecated
 * @param setid The drug label setId
 * @return The set of drug labels that match the setId specified.
 */
@GET
@Path("label/{setid}")
@Produces(MediaType.APPLICATION_JSON)
public Collection<DrugLabel> getDrugLabel(@PathParam("setid") String setid) {
  // Plain repository lookup on DRUG_LABELS by set id.
  return drugLabelRepository.findAllBySetid(setid);
}
/**
 * <a href="https://github.com/OHDSI/Penelope">PENELOPE</a> function: searches
 * DRUG_LABELS.search_name for labels containing the given term.
 *
 * @summary Search for a drug label - will be depreciated
 * @deprecated
 * @param searchTerm The search term
 * @return A list of drug labels matching the search term
 */
@GET
@Path("labelsearch/{searchTerm}")
@Produces(MediaType.APPLICATION_JSON)
public Collection<DrugLabel> searchDrugLabels(@PathParam("searchTerm") String searchTerm) {
  // Contains-style match delegated to the repository query.
  return drugLabelRepository.searchNameContainsTerm(searchTerm);
}
/**
 * Provides a high level description of the information found in the Common
 * Evidence Model (CEM) by querying the CEM schema's metadata table.
 *
 * @summary Get summary of the Common Evidence Model (CEM) contents
 * @param sourceKey The source key containing the CEM daimon
 * @return A collection of evidence information stored in CEM
 */
@GET
@Path("{sourceKey}/info")
@Produces(MediaType.APPLICATION_JSON)
public Collection<EvidenceInfo> getInfo(@PathParam("sourceKey") String sourceKey) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  // Render the templated SQL against the CEM schema of the requested source.
  String cemSchema = source.getTableQualifier(SourceDaimon.DaimonType.CEM);
  PreparedStatementRenderer psr = new PreparedStatementRenderer(
      source, "/resources/evidence/sql/getInfo.sql", "cem_schema", cemSchema);
  return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), (rs, rowNum) -> {
    EvidenceInfo info = new EvidenceInfo();
    info.title = rs.getString("TITLE");
    info.description = rs.getString("DESCRIPTION");
    info.provenance = rs.getString("PROVENANCE");
    info.contributor = rs.getString("CONTRIBUTOR");
    info.contactName = rs.getString("CONTACT_NAME");
    info.creationDate = rs.getDate("CREATION_DATE");
    info.coverageStartDate = rs.getDate("COVERAGE_START_DATE");
    info.coverageEndDate = rs.getDate("COVERAGE_END_DATE");
    info.versionIdentifier = rs.getString("VERSION_IDENTIFIER");
    return info;
  });
}
/**
 * Searches the evidence base for evidence related to one or more drug and
 * condition combinations for the source(s) specified.
 *
 * @param sourceKey The source key containing the CEM daimon
 * @param searchParams drug/condition concept ids and source ids to match
 * @return evidence rows for the matching drug/condition pairs
 */
@POST
@Path("{sourceKey}/drugconditionpairs")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Collection<DrugHoiEvidence> getDrugConditionPairs(@PathParam("sourceKey") String sourceKey, DrugConditionSourceSearchParams searchParams) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  String sql = getDrugHoiEvidenceSQL(source, searchParams);
  // Populate the DTO straight from the result set, preserving column read order.
  return getSourceJdbcTemplate(source).query(sql, (rs, rowNum) -> {
    DrugHoiEvidence evidence = new DrugHoiEvidence();
    evidence.evidenceSource = rs.getString("SOURCE_ID");
    evidence.mappingType = rs.getString("MAPPING_TYPE");
    evidence.drugConceptId = rs.getString("DRUG_CONCEPT_ID");
    evidence.drugConceptName = rs.getString("DRUG_CONCEPT_NAME");
    evidence.hoiConceptId = rs.getString("CONDITION_CONCEPT_ID");
    evidence.hoiConceptName = rs.getString("CONDITION_CONCEPT_NAME");
    evidence.uniqueIdentifier = rs.getString("UNIQUE_IDENTIFIER");
    return evidence;
  });
}
/**
 * Retrieves a list of evidence for the specified drug conceptId.
 *
 * @summary Get Evidence For Drug
 * @param sourceKey The source key containing the CEM daimon
 * @param id - An RxNorm Drug Concept Id
 * @return A list of evidence
 */
@GET
@Path("{sourceKey}/drug/{id}")
@Produces(MediaType.APPLICATION_JSON)
public Collection<DrugEvidence> getDrugEvidence(@PathParam("sourceKey") String sourceKey, @PathParam("id") final Long id) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  PreparedStatementRenderer psr = prepareGetEvidenceForConcept(source, id);
  // Populate the DTO straight from the result set, preserving column read order.
  return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), (rs, rowNum) -> {
    DrugEvidence evidence = new DrugEvidence();
    evidence.evidenceSource = rs.getString("SOURCE_ID");
    evidence.hoiConceptId = rs.getString("CONCEPT_ID_2");
    evidence.hoiConceptName = rs.getString("CONCEPT_ID_2_NAME");
    evidence.statisticType = rs.getString("STATISTIC_VALUE_TYPE");
    evidence.statisticValue = rs.getBigDecimal("STATISTIC_VALUE");
    evidence.relationshipType = rs.getString("RELATIONSHIP_ID");
    evidence.uniqueIdentifier = rs.getString("UNIQUE_IDENTIFIER");
    evidence.uniqueIdentifierType = rs.getString("UNIQUE_IDENTIFIER_TYPE");
    return evidence;
  });
}
/**
 * Retrieves a list of evidence for the specified health outcome of interest
 * (hoi) conceptId.
 *
 * @summary Get Evidence For Health Outcome
 * @param sourceKey The source key containing the CEM daimon
 * @param id The conceptId for the health outcome of interest
 * @return A list of evidence
 */
@GET
@Path("{sourceKey}/hoi/{id}")
@Produces(MediaType.APPLICATION_JSON)
public Collection<HoiEvidence> getHoiEvidence(@PathParam("sourceKey") String sourceKey, @PathParam("id") final Long id) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  PreparedStatementRenderer psr = prepareGetEvidenceForConcept(source, id);
  // Populate the DTO straight from the result set, preserving column read order.
  return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), (rs, rowNum) -> {
    HoiEvidence evidence = new HoiEvidence();
    evidence.evidenceSource = rs.getString("SOURCE_ID");
    evidence.drugConceptId = rs.getString("CONCEPT_ID_1");
    evidence.drugConceptName = rs.getString("CONCEPT_ID_1_NAME");
    evidence.statisticType = rs.getString("STATISTIC_VALUE_TYPE");
    evidence.statisticValue = rs.getBigDecimal("STATISTIC_VALUE");
    evidence.relationshipType = rs.getString("RELATIONSHIP_ID");
    evidence.uniqueIdentifier = rs.getString("UNIQUE_IDENTIFIER");
    evidence.uniqueIdentifierType = rs.getString("UNIQUE_IDENTIFIER_TYPE");
    return evidence;
  });
}
/**
 * Retrieves a list of RxNorm ingredients from the concept set and determines if
 * we have label evidence for them.
 *
 * @summary Get Drug Labels For RxNorm Ingredients
 * @param sourceKey The source key of the CEM daimon
 * @param identifiers The list of RxNorm Ingredients concepts or ancestors
 * @return A list of evidence for the drug and HOI
 */
@Path("{sourceKey}/druglabel")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<DrugLabelInfo> getDrugIngredientLabel(@PathParam("sourceKey") String sourceKey, long[] identifiers) {
  // Resolve the CEM source, then delegate the label lookup.
  return executeGetDrugLabels(identifiers, getSourceRepository().findBySourceKey(sourceKey));
}
/**
 * Retrieves a list of evidence for the specified health outcome of interest and
 * drug as defined in the key parameter.
 *
 * @summary Get Evidence For Drug &amp; Health Outcome
 * @param sourceKey The source key of the CEM daimon
 * @param key The key must be structured as {drugConceptId}-{hoiConceptId}
 * @return A list of evidence for the drug and HOI
 */
@GET
@Path("{sourceKey}/drughoi/{key}")
@Produces(MediaType.APPLICATION_JSON)
public List<DrugHoiEvidence> getDrugHoiEvidence(@PathParam("sourceKey") String sourceKey, @PathParam("key") final String key) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  PreparedStatementRenderer psr = prepareGetDrugHoiEvidence(key, source);
  // Populate the DTO straight from the result set, preserving column read order.
  return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), (rs, rowNum) -> {
    DrugHoiEvidence evidence = new DrugHoiEvidence();
    evidence.evidenceSource = rs.getString("SOURCE_ID");
    evidence.drugConceptId = rs.getString("CONCEPT_ID_1");
    evidence.drugConceptName = rs.getString("CONCEPT_ID_1_NAME");
    evidence.hoiConceptId = rs.getString("CONCEPT_ID_2");
    evidence.hoiConceptName = rs.getString("CONCEPT_ID_2_NAME");
    evidence.statisticType = rs.getString("STATISTIC_VALUE_TYPE");
    evidence.statisticValue = rs.getBigDecimal("STATISTIC_VALUE");
    evidence.relationshipType = rs.getString("RELATIONSHIP_ID");
    evidence.uniqueIdentifier = rs.getString("UNIQUE_IDENTIFIER");
    evidence.uniqueIdentifierType = rs.getString("UNIQUE_IDENTIFIER_TYPE");
    return evidence;
  });
}
/**
 * Originally provided a roll up of evidence from LAERTES; now returns an empty
 * list plus an HTTP Warning header directing callers to the replacement endpoint.
 *
 * @summary Deprecated
 * @deprecated
 * @param sourceKey The source key of the CEM daimon
 * @param id The RxNorm drug conceptId
 * @param filter Specified the type of rollup level (ingredient, clinical
 * drug, branded drug)
 * @return A list of evidence rolled up
 */
@GET
@Path("{sourceKey}/drugrollup/{filter}/{id}")
@Produces(MediaType.APPLICATION_JSON)
public Response getDrugRollupIngredientEvidence(@PathParam("sourceKey") String sourceKey, @PathParam("id") final Long id, @PathParam("filter") final String filter) {
  String warningMessage = "This method will be deprecated in the next release. Instead, please use the new REST endpoint: evidence/{sourceKey}/drug/{id}";
  ArrayList<DrugRollUpEvidence> evidence = new ArrayList<>();
  // Fix: "Warning" is the header NAME; the 299 warn-code belongs in the VALUE
  // (RFC 7234 §5.5). The previous name "Warning: 299" contained ':' and ' ',
  // which are illegal in an HTTP field name.
  return Response.ok(evidence).header("Warning", "299 - \"" + warningMessage + "\"").build();
}
/**
 * Retrieve all evidence from Common Evidence Model (CEM) for a given conceptId.
 *
 * @summary Get evidence for a concept
 * @param sourceKey The source key of the CEM daimon
 * @param id The conceptId of interest
 * @return A list of evidence matching the conceptId of interest
 */
@GET
@Path("{sourceKey}/{id}")
@Produces(MediaType.APPLICATION_JSON)
public Collection<Evidence> getEvidence(@PathParam("sourceKey") String sourceKey, @PathParam("id") final Long id) {
  Source source = getSourceRepository().findBySourceKey(sourceKey);
  PreparedStatementRenderer psr = prepareGetEvidenceForConcept(source, id);
  // Populate the DTO straight from the result set, preserving column read order.
  return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), (rs, rowNum) -> {
    Evidence evidence = new Evidence();
    evidence.evidenceSource = rs.getString("SOURCE_ID");
    evidence.drugConceptId = rs.getString("CONCEPT_ID_1");
    evidence.drugConceptName = rs.getString("CONCEPT_ID_1_NAME");
    evidence.hoiConceptId = rs.getString("CONCEPT_ID_2");
    evidence.hoiConceptName = rs.getString("CONCEPT_ID_2_NAME");
    evidence.statisticType = rs.getString("STATISTIC_VALUE_TYPE");
    evidence.statisticValue = rs.getBigDecimal("STATISTIC_VALUE");
    evidence.relationshipType = rs.getString("RELATIONSHIP_ID");
    evidence.uniqueIdentifier = rs.getString("UNIQUE_IDENTIFIER");
    evidence.uniqueIdentifierType = rs.getString("UNIQUE_IDENTIFIER_TYPE");
    return evidence;
  });
}
/**
 * Originally provided an evidence summary from LAERTES; now returns an empty
 * list plus an HTTP Warning header directing callers to the replacement endpoint.
 *
 * @summary Deprecated
 * @deprecated
 * @param sourceKey The source key of the CEM daimon
 * @param conditionID The condition conceptId
 * @param drugID The drug conceptId
 * @param evidenceGroup The evidence group
 * @return A summary of evidence
 */
@GET
@Path("{sourceKey}/evidencesummary")
@Produces(MediaType.APPLICATION_JSON)
public Response getEvidenceSummaryBySource(@PathParam("sourceKey") String sourceKey, @QueryParam("conditionID") String conditionID, @QueryParam("drugID") String drugID, @QueryParam("evidenceGroup") String evidenceGroup) {
  String warningMessage = "This method will be deprecated in the next release. Instead, please use the new REST endpoint: evidence/{sourceKey}/drug/{id}";
  ArrayList<EvidenceSummary> evidenceSummary = new ArrayList<>();
  // Fix: "Warning" is the header NAME; the 299 warn-code belongs in the VALUE
  // (RFC 7234 §5.5). The previous name "Warning: 299" was not a legal field name.
  return Response.ok(evidenceSummary).header("Warning", "299 - \"" + warningMessage + "\"").build();
}
/**
 * Originally provided evidence details from LAERTES
*
 * @summary Deprecated
* @deprecated
* @param sourceKey The source key of the CEM daimon
* @param conditionID The condition conceptId
* @param drugID The drug conceptId
* @param evidenceType The evidence type
* @return A list of evidence details
* @throws org.codehaus.jettison.json.JSONException
* @throws java.io.IOException
*/
@Deprecated // javadoc already marks this @deprecated; annotation added so compilers/tools flag callers
@GET
@Path("{sourceKey}/evidencedetails")
@Produces(MediaType.APPLICATION_JSON)
public Response getEvidenceDetails(@PathParam("sourceKey") String sourceKey,
                                   @QueryParam("conditionID") String conditionID,
                                   @QueryParam("drugID") String drugID,
                                   @QueryParam("evidenceType") String evidenceType)
        throws JSONException, IOException {
    // Stubbed out: always returns an empty list plus a deprecation warning header.
    String warningMessage = "This method will be deprecated in the next release. Instead, please use the new REST endpoint: evidence/{sourceKey}/drug/{id}";
    ArrayList<EvidenceDetails> evidenceDetails = new ArrayList<>();
    return Response.ok(evidenceDetails).header("Warning: 299", warningMessage).build();
}
/**
 * Originally provided a summary from spontaneous reports from LAERTES
*
 * @summary Deprecated
* @deprecated
* @param sourceKey The source key of the CEM daimon
* @param search The search term
* @return A list of spontaneous report summaries
* @throws JSONException
* @throws IOException
*/
@Deprecated // javadoc already marks this @deprecated; annotation added so compilers/tools flag callers
@POST
@Path("{sourceKey}/spontaneousreports")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response getSpontaneousReports(@PathParam("sourceKey") String sourceKey, EvidenceSearch search) throws JSONException, IOException {
    // Stubbed out: always returns an empty list plus a deprecation warning header.
    String warningMessage = "This method will be deprecated in the next release.";
    ArrayList<SpontaneousReport> returnVal = new ArrayList<>();
    return Response.ok(returnVal).header("Warning: 299", warningMessage).build();
}
/**
* Originally provided an evidence search from LAERTES
*
 * @summary Deprecated
* @deprecated
* @param sourceKey The source key of the CEM daimon
* @param search The search term
* @return A list of evidence
* @throws JSONException
* @throws IOException
*/
@Deprecated // javadoc already marks this @deprecated; annotation added so compilers/tools flag callers
@POST
@Path("{sourceKey}/evidencesearch")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response evidenceSearch(@PathParam("sourceKey") String sourceKey, EvidenceSearch search) throws JSONException, IOException {
    // Stubbed out: always returns an empty list plus a deprecation warning header.
    String warningMessage = "This method will be deprecated in the next release.";
    ArrayList<EvidenceUniverse> returnVal = new ArrayList<>();
    return Response.ok(returnVal).header("Warning: 299", warningMessage).build();
}
/**
* Originally provided a label evidence search from LAERTES
*
 * @summary Deprecated
* @deprecated
* @param sourceKey The source key of the CEM daimon
* @param search The search term
* @return A list of evidence
* @throws JSONException
* @throws IOException
*/
@Deprecated // javadoc already marks this @deprecated; annotation added so compilers/tools flag callers
@POST
@Path("{sourceKey}/labelevidence")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response labelEvidence(@PathParam("sourceKey") String sourceKey, EvidenceSearch search) throws JSONException, IOException {
    // Stubbed out: always returns an empty list plus a deprecation warning header.
    String warningMessage = "This method will be deprecated in the next release.";
    ArrayList<EvidenceUniverse> returnVal = new ArrayList<>();
    return Response.ok(returnVal).header("Warning: 299", warningMessage).build();
}
/**
* Queues up a negative control generation task to compute negative controls
* using Common Evidence Model (CEM)
*
* @summary Generate negative controls
* @param sourceKey The source key of the CEM daimon
* @param task - The negative control task with parameters
* @return information about the negative control job
* @throws Exception
*/
@POST
@Path("{sourceKey}/negativecontrols")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource queueNegativeControlsJob(@PathParam("sourceKey") String sourceKey, NegativeControlTaskParameters task) throws Exception {
    if (task == null) {
        return null;
    }
    JobParametersBuilder builder = new JobParametersBuilder();
    // Get a JDBC template for the OHDSI source repository
    // and the source dialect for use when we write the results
    // back to the OHDSI repository
    JdbcTemplate jdbcTemplate = getJdbcTemplate();
    task.setJdbcTemplate(jdbcTemplate);
    String ohdsiDatasourceSourceDialect = getSourceDialect();
    task.setSourceDialect(ohdsiDatasourceSourceDialect);
    task.setOhdsiSchema(this.getOhdsiSchema());
    // source key comes from the client, we look it up here and hand it off to the tasklet
    Source source = getSourceRepository().findBySourceKey(sourceKey);
    // Verify the source has both the evidence & results daimon configured
    // and throw an exception if either is missing
    String cemSchema = source.getTableQualifier(SourceDaimon.DaimonType.CEM);
    String cemResultsSchema = source.getTableQualifierOrNull(SourceDaimon.DaimonType.CEMResults);
    if (cemSchema == null) {
        // BUGFIX: previously `throw NotFoundException(...)` without `new`, which does not compile
        throw new NotFoundException("Evidence daimon not configured for source.");
    }
    if (cemResultsSchema == null) {
        throw new NotFoundException("Results daimon not configured for source.");
    }
    task.setSource(source);
    if (!StringUtils.isEmpty(task.getJobName())) {
        builder.addString("jobName", limitJobParams(task.getJobName()));
    }
    builder.addString("concept_set_id", ("" + task.getConceptSetId()));
    builder.addString("concept_set_name", task.getConceptSetName());
    builder.addString("concept_domain_id", task.getConceptDomainId());
    builder.addString("source_id", ("" + source.getSourceId()));
    // Create a set of parameters to store with the generation info
    JSONObject params = new JSONObject();
    params.put("csToInclude", task.getCsToInclude());
    params.put("csToExclude", task.getCsToExclude());
    builder.addString("params", params.toString());
    // Resolve the concept set expressions for the included and excluded
    // concept sets if specified; a failed build is logged and leaves the SQL empty.
    ConceptSetExpressionQueryBuilder csBuilder = new ConceptSetExpressionQueryBuilder();
    ConceptSetExpression csExpression;
    String csSQL = "";
    if (task.getCsToInclude() > 0) {
        try {
            csExpression = conceptSetService.getConceptSetExpression(task.getCsToInclude());
            csSQL = csBuilder.buildExpressionQuery(csExpression);
        } catch (Exception e) {
            log.warn("Failed to build Inclusion expression query", e);
        }
    }
    task.setCsToIncludeSQL(csSQL);
    csSQL = "";
    if (task.getCsToExclude() > 0) {
        try {
            csExpression = conceptSetService.getConceptSetExpression(task.getCsToExclude());
            csSQL = csBuilder.buildExpressionQuery(csExpression);
        } catch (Exception e) {
            log.warn("Failed to build Exclusion expression query", e);
        }
    }
    task.setCsToExcludeSQL(csSQL);
    final String taskString = task.toString();
    final JobParameters jobParameters = builder.toJobParameters();
    log.info("Beginning run for negative controls analysis task: {}", taskString);
    NegativeControlTasklet tasklet = new NegativeControlTasklet(task, getSourceJdbcTemplate(task.getSource()), task.getJdbcTemplate(),
            getTransactionTemplate(), this.conceptSetGenerationInfoRepository, this.getSourceDialect());
    return this.jobTemplate.launchTasklet(NAME, "negativeControlsAnalysisStep", tasklet, jobParameters);
}
/**
* Retrieves the negative controls for a concept set
*
* @summary Retrieve negative controls
* @param sourceKey The source key of the CEM daimon
* @param conceptSetId The concept set id
* @return The list of negative controls
*/
@GET
@Path("{sourceKey}/negativecontrols/{conceptsetid}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Collection<NegativeControlDTO> getNegativeControls(@PathParam("sourceKey") String sourceKey, @PathParam("conceptsetid") int conceptSetId) throws Exception {
    // Render the parameterized lookup for this concept set and map each row to a DTO.
    Source source = getSourceRepository().findBySourceKey(sourceKey);
    PreparedStatementRenderer renderer = this.prepareGetNegativeControls(source, conceptSetId);
    return getSourceJdbcTemplate(source).query(renderer.getSql(), renderer.getSetter(), new NegativeControlMapper());
}
/**
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | true |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java | src/main/java/org/ohdsi/webapi/service/ConceptSetService.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.service;
import java.io.ByteArrayOutputStream;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import javax.transaction.Transactional;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import javax.cache.CacheManager;
import javax.cache.configuration.MutableConfiguration;
import org.apache.shiro.authz.UnauthorizedException;
import org.ohdsi.circe.vocabulary.ConceptSetExpression;
import org.ohdsi.vocabulary.Concept;
import org.ohdsi.webapi.check.CheckResult;
import org.ohdsi.webapi.check.checker.conceptset.ConceptSetChecker;
import org.ohdsi.webapi.conceptset.ConceptSet;
import org.ohdsi.webapi.conceptset.ConceptSetExport;
import org.ohdsi.webapi.conceptset.ConceptSetGenerationInfo;
import org.ohdsi.webapi.conceptset.ConceptSetGenerationInfoRepository;
import org.ohdsi.webapi.conceptset.ConceptSetItem;
import org.ohdsi.webapi.conceptset.dto.ConceptSetVersionFullDTO;
import org.ohdsi.webapi.conceptset.annotation.ConceptSetAnnotation;
import org.ohdsi.webapi.exception.ConceptNotExistException;
import org.ohdsi.webapi.security.PermissionService;
import org.ohdsi.webapi.service.annotations.SearchDataTransformer;
import org.ohdsi.webapi.service.dto.AnnotationDetailsDTO;
import org.ohdsi.webapi.service.dto.ConceptSetDTO;
import org.ohdsi.webapi.service.dto.SaveConceptSetAnnotationsRequest;
import org.ohdsi.webapi.service.dto.AnnotationDTO;
import org.ohdsi.webapi.service.dto.CopyAnnotationsRequest;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.shiro.management.Security;
import org.ohdsi.webapi.shiro.management.datasource.SourceAccessor;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceInfo;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.tag.domain.HasTags;
import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO;
import org.ohdsi.webapi.util.CacheHelper;
import org.ohdsi.webapi.util.ExportUtil;
import org.ohdsi.webapi.util.NameUtils;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.ohdsi.webapi.versioning.domain.ConceptSetVersion;
import org.ohdsi.webapi.versioning.domain.Version;
import org.ohdsi.webapi.versioning.domain.VersionBase;
import org.ohdsi.webapi.versioning.domain.VersionType;
import org.ohdsi.webapi.versioning.dto.VersionDTO;
import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO;
import org.ohdsi.webapi.versioning.service.VersionService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.cache.JCacheManagerCustomizer;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.stereotype.Component;
/**
* Provides REST services for working with
* concept sets.
*
* @summary Concept Set
*/
@Component
@Transactional
@Path("/conceptset/")
public class ConceptSetService extends AbstractDaoService implements HasTags<Integer> {
//create cache
@Component
public static class CachingSetup implements JCacheManagerCustomizer {
    // Cache of the full concept-set list, keyed per subject (see getConceptSets()).
    public static final String CONCEPT_SET_LIST_CACHE = "conceptSetList";
    @Override
    public void customize(CacheManager cacheManager) {
        // due to unit tests causing application contexts to reload cache manager caches, we
        // have to check for the existence of a cache before creating it
        Set<String> cacheNames = CacheHelper.getCacheNames(cacheManager);
        // Evicted when a concept set is created or updated, or permissions, or tags change
        if (!cacheNames.contains(CONCEPT_SET_LIST_CACHE)) {
            cacheManager.createCache(CONCEPT_SET_LIST_CACHE, new MutableConfiguration<String, Collection<ConceptSetDTO>>()
                .setTypes(String.class, (Class<Collection<ConceptSetDTO>>) (Class<?>) List.class)
                .setStoreByValue(false)
                .setStatisticsEnabled(true));
        }
    }
}
// Stores generation history for concept sets (read by getConceptSetGenerationInfo, cleared on delete).
@Autowired
private ConceptSetGenerationInfoRepository conceptSetGenerationInfoRepository;
// Vocabulary lookups: identifier, included-concept and mapped-concept resolution.
@Autowired
private VocabularyService vocabService;
@Autowired
private SourceService sourceService;
// Enforces per-source access checks (see getConceptSetExpression by sourceKey).
@Autowired
private SourceAccessor sourceAccessor;
@Autowired
private UserRepository userRepository;
// Converts between ConceptSet entities and ConceptSetDTOs.
@Autowired
private GenericConversionService conversionService;
@Autowired
private Security security;
@Autowired
private PermissionService permissionService;
@Autowired
private ConceptSetChecker checker;
@Autowired
private VersionService<ConceptSetVersion> versionService;
@Autowired
private SearchDataTransformer searchDataTransformer;
@Autowired
private ObjectMapper mapper;
// When true, read-access filtering in getConceptSets() is skipped for all users.
@Value("${security.defaultGlobalReadPermissions}")
private boolean defaultGlobalReadPermissions;
// Map key under which getNameForCopy() returns the suggested copy name.
public static final String COPY_NAME = "copyName";
/**
* Get the concept set based in the identifier
*
* @summary Get concept set by ID
* @param id The concept set ID
* @return The concept set definition
*/
@Path("{id}")
@GET
@Produces(MediaType.APPLICATION_JSON)
public ConceptSetDTO getConceptSet(@PathParam("id") final int id) {
    // Resolve the entity; an unknown id yields a 404 via the shared helper.
    ConceptSet entity = getConceptSetRepository().findById(id);
    ExceptionUtils.throwNotFoundExceptionIfNull(entity, String.format("There is no concept set with id = %d.", id));
    return conversionService.convert(entity, ConceptSetDTO.class);
}
/**
* Get the full list of concept sets in the WebAPI database
*
* @summary Get all concept sets
* @return A list of all concept sets in the WebAPI database
*/
@GET
@Path("/")
@Produces(MediaType.APPLICATION_JSON)
@Cacheable(cacheNames = ConceptSetService.CachingSetup.CONCEPT_SET_LIST_CACHE, key = "@permissionService.getSubjectCacheKey()")
public Collection<ConceptSetDTO> getConceptSets() {
    // Runs inside a transaction; read-access filtering only applies when
    // global read permissions are disabled.
    return getTransactionTemplate().execute(transactionStatus -> {
        List<ConceptSetDTO> result = new ArrayList<>();
        for (ConceptSet conceptSet : getConceptSetRepository().findAll()) {
            if (!defaultGlobalReadPermissions && !permissionService.hasReadAccess(conceptSet)) {
                continue;
            }
            ConceptSetDTO dto = conversionService.convert(conceptSet, ConceptSetDTO.class);
            permissionService.fillWriteAccess(conceptSet, dto);
            permissionService.fillReadAccess(conceptSet, dto);
            result.add(dto);
        }
        return result;
    });
}
/**
* Get the concept set items for a selected concept set ID.
*
* @summary Get the concept set items
* @param id The concept set identifier
* @return A list of concept set items
*/
@GET
@Path("{id}/items")
@Produces(MediaType.APPLICATION_JSON)
public Iterable<ConceptSetItem> getConceptSetItems(@PathParam("id") final int id) {
    // Pure repository delegation; returns the raw items (concept id + flags), not resolved concepts.
    return getConceptSetItemRepository().findAllByConceptSetId(id);
}
/**
* Get the concept set expression for a selected version of the expression
*
* @summary Get concept set expression by version
* @param id The concept set ID
* @param version The version identifier
* @return The concept set expression
*/
@GET
@Path("{id}/version/{version}/expression")
@Produces(MediaType.APPLICATION_JSON)
public ConceptSetExpression getConceptSetExpression(@PathParam("id") final int id,
                                                    @PathParam("version") final int version) {
    // No priority vocabulary source available means the caller cannot resolve concepts.
    SourceInfo priorityVocabulary = sourceService.getPriorityVocabularySourceInfo();
    if (priorityVocabulary == null) {
        throw new UnauthorizedException();
    }
    return getConceptSetExpression(id, version, priorityVocabulary);
}
/**
* Get the concept set expression by version for the selected
* source key. NOTE: This method requires the specification
* of a source key but it does not appear to be used by the underlying
* code.
*
* @summary Get concept set expression by version and source.
* @param id The concept set identifier
* @param version The version of the concept set
* @param sourceKey The source key
* @return The concept set expression for the selected version
*/
@GET
@Path("{id}/version/{version}/expression/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
public ConceptSetExpression getConceptSetExpression(@PathParam("id") final int id,
                                                    @PathParam("version") final int version,
                                                    @PathParam("sourceKey") final String sourceKey) {
    // NOTE: sourceKey is accepted for the route but intentionally not used here
    // (see the javadoc above) — the priority vocabulary source is always used.
    SourceInfo priorityVocabulary = sourceService.getPriorityVocabularySourceInfo();
    if (priorityVocabulary == null) {
        throw new UnauthorizedException();
    }
    return getConceptSetExpression(id, version, priorityVocabulary);
}
/**
* Get the concept set expression by identifier
*
* @summary Get concept set by ID
* @param id The concept set identifier
* @return The concept set expression
*/
@GET
@Path("{id}/expression")
@Produces(MediaType.APPLICATION_JSON)
public ConceptSetExpression getConceptSetExpression(@PathParam("id") final int id) {
    // Current (unversioned) expression, resolved against the priority vocabulary source.
    SourceInfo priorityVocabulary = sourceService.getPriorityVocabularySourceInfo();
    if (priorityVocabulary == null) {
        throw new UnauthorizedException();
    }
    return getConceptSetExpression(id, null, priorityVocabulary);
}
/**
* Get the concept set expression by identifier and source key
*
* @summary Get concept set by ID and source
* @param id The concept set ID
* @param sourceKey The source key
* @return The concept set expression
*/
@GET
@Path("{id}/expression/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
public ConceptSetExpression getConceptSetExpression(@PathParam("id") final int id, @PathParam("sourceKey") final String sourceKey) {
    // Unlike the version/source overload, this one really resolves against the named source.
    Source vocabularySource = sourceService.findBySourceKey(sourceKey);
    sourceAccessor.checkAccess(vocabularySource);
    return getConceptSetExpression(id, null, vocabularySource.getSourceInfo());
}
/**
 * Builds the full concept set expression: loads the items (live or from a saved
 * version), resolves their concepts via the vocabulary service, and combines
 * each item's flags with its resolved concept.
 */
private ConceptSetExpression getConceptSetExpression(int id, Integer version, SourceInfo sourceInfo) {
    // Items either from the live repository or from the requested saved version.
    List<ConceptSetItem> items = new ArrayList<>();
    if (version == null) {
        getConceptSetItems(id).forEach(items::add);
    } else {
        ConceptSetVersionFullDTO versionDto = getVersion(id, version);
        items.addAll(versionDto.getItems());
    }
    // Unique concept ids; values are filled in after the vocabulary lookup.
    HashMap<Long, Concept> conceptsById = new HashMap<>();
    for (ConceptSetItem item : items) {
        conceptsById.put(item.getConceptId(), null);
    }
    long[] conceptIds = new long[conceptsById.size()];
    int index = 0;
    for (Long conceptId : conceptsById.keySet()) {
        conceptIds[index++] = conceptId;
    }
    // Fall back to the priority vocabulary when no source was supplied.
    String vocabSourceKey;
    if (sourceInfo == null) {
        vocabSourceKey = sourceService.getPriorityVocabularySource().getSourceKey();
    } else {
        vocabSourceKey = sourceInfo.sourceKey;
    }
    Collection<Concept> concepts = vocabService.executeIdentifierLookup(vocabSourceKey, conceptIds);
    if (concepts.size() != conceptIds.length) {
        // Report exactly which requested ids the source could not resolve.
        String missingIds = Arrays.stream(conceptIds).boxed()
            .filter(conceptId -> concepts.stream().noneMatch(c -> c.conceptId.equals(conceptId)))
            .map(String::valueOf)
            .collect(Collectors.joining(",", "(", ")"));
        throw new ConceptNotExistException("Current data source does not contain required concepts " + missingIds);
    }
    for (Concept concept : concepts) {
        conceptsById.put(concept.conceptId, concept);
    }
    // Pair each item's inclusion flags with its resolved concept.
    ArrayList<ConceptSetExpression.ConceptSetItem> expressionItems = new ArrayList<>();
    for (ConceptSetItem item : items) {
        ConceptSetExpression.ConceptSetItem expressionItem = new ConceptSetExpression.ConceptSetItem();
        expressionItem.concept = conceptsById.get(item.getConceptId());
        expressionItem.includeDescendants = (item.getIncludeDescendants() == 1);
        expressionItem.includeMapped = (item.getIncludeMapped() == 1);
        expressionItem.isExcluded = (item.getIsExcluded() == 1);
        expressionItems.add(expressionItem);
    }
    ConceptSetExpression expression = new ConceptSetExpression();
    expression.items = expressionItems.toArray(new ConceptSetExpression.ConceptSetItem[0]);
    return expression;
}
/**
* Check if the concept set name exists (DEPRECATED)
*
* @summary DO NOT USE
* @deprecated
* @param id The concept set ID
* @param sourceKey The source key
* @return The concept set expression
*/
@Deprecated
@GET
@Path("{id}/{name}/exists")
@Produces(MediaType.APPLICATION_JSON)
public Response getConceptSetExistsDeprecated(@PathParam("id") final int id, @PathParam("name") String name) {
    // Kept only for backward compatibility; clients should use conceptset/{id}/exists?name={name}.
    String warningMessage = "This method will be deprecated in the next release. Instead, please use the new REST endpoint: conceptset/{id}/exists?name={name}";
    Collection<ConceptSet> cs = getConceptSetRepository().conceptSetExists(id, name);
    return Response.ok(cs).header("Warning: 299", warningMessage).build();
}
/**
* Check if a concept set with the same name exists in the WebAPI
* database. The name is checked against the selected concept set ID
* to ensure that only the selected concept set ID has the name specified.
*
* @summary Concept set with same name exists
* @param id The concept set ID
* @param name The name of the concept set
* @return The count of concept sets with the name, excluding the
* specified concept set ID.
*/
@GET
@Path("/{id}/exists")
@Produces(MediaType.APPLICATION_JSON)
public int getCountCSetWithSameName(@PathParam("id") @DefaultValue("0") final int id, @QueryParam("name") String name) {
    // Counts concept sets with this name, excluding the one with the given id (0 = no exclusion).
    return getConceptSetRepository().getCountCSetWithSameName(id, name);
}
/**
* Update the concept set items for the selected concept set ID in the
* WebAPI database.
*
* The concept set has two parts: 1) the elements of the ConceptSetDTO that
* consist of the identifier, name, etc. 2) the concept set items which
* contain the concepts and their mapping (i.e. include descendants).
*
* @summary Update concept set items
* @param id The concept set ID
* @param items An array of ConceptSetItems
* @return Boolean: true if the save is successful
*/
@PUT
@Path("{id}/items")
@Produces(MediaType.APPLICATION_JSON)
@Transactional
public boolean saveConceptSetItems(@PathParam("id") final int id, ConceptSetItem[] items) {
    // Replace-all semantics: drop the existing items, then persist the incoming set.
    getConceptSetItemRepository().deleteByConceptSetId(id);
    for (ConceptSetItem item : items) {
        // Reset the id so copies (e.g. restored from a version) are inserted as new rows.
        item.setId(0);
        item.setConceptSetId(id);
        getConceptSetItemRepository().save(item);
    }
    return true;
}
/**
* Exports a list of concept sets, based on the conceptSetList argument,
* to one or more comma separated value (CSV) file(s), compresses the files
* into a ZIP file and sends the ZIP file to the client.
*
* @summary Export concept set list to CSV files
* @param conceptSetList A list of concept set identifiers in the format
* conceptset=<concept_set_id_1>+<concept_set_id_2>+<concept_set_id_n>
* @return
* @throws Exception
*/
@GET
@Path("/exportlist")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response exportConceptSetList(@QueryParam("conceptsets") final String conceptSetList) throws Exception {
    // Parse the '+'-separated list of concept set ids; a malformed id propagates
    // as NumberFormatException, same as before (the old catch blocks only rethrew).
    ArrayList<Integer> conceptSetIds = new ArrayList<>();
    for (String token : conceptSetList.split("\\+")) {
        conceptSetIds.add(Integer.valueOf(token));
    }
    if (conceptSetIds.isEmpty()) {
        throw new IllegalArgumentException("You must supply a querystring value for conceptsets that is of the form: ?conceptset=<concept_set_id_1>+<concept_set_id_2>+<concept_set_id_n>");
    }
    // Export every requested concept set against the priority vocabulary source.
    Source source = sourceService.getPriorityVocabularySource();
    ArrayList<ConceptSetExport> exports = new ArrayList<>();
    for (Integer conceptSetId : conceptSetIds) {
        exports.add(getConceptSetForExport(conceptSetId, new SourceInfo(source)));
    }
    // Write the concept set expressions to CSVs and bundle them into a single ZIP.
    ByteArrayOutputStream baos = ExportUtil.writeConceptSetExportToCSVAndZip(exports);
    return Response
        .ok(baos)
        .type(MediaType.APPLICATION_OCTET_STREAM)
        .header("Content-Disposition", "attachment; filename=\"conceptSetExport.zip\"")
        .build();
}
/**
* Exports a single concept set to a comma separated value (CSV)
* file, compresses to a ZIP file and sends to the client.
* @param id The concept set ID
* @return A zip file containing the exported concept set
* @throws Exception
*/
@GET
@Path("{id}/export")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response exportConceptSetToCSV(@PathParam("id") final String id) throws Exception {
    // Single-id convenience wrapper; 'id' is passed through in the same '+'-separated format.
    return this.exportConceptSetList(id);
}
/**
* Save a new concept set to the WebAPI database
*
* @summary Create a new concept set
* @param conceptSetDTO The concept set to save
* @return The concept set saved with the concept set identifier
*/
@Path("/")
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@CacheEvict(cacheNames = CachingSetup.CONCEPT_SET_LIST_CACHE, allEntries = true)
public ConceptSetDTO createConceptSet(ConceptSetDTO conceptSetDTO) {
    // Stamp creation metadata on a fresh entity, then copy the user-supplied
    // fields onto it via the shared update helper (which also persists it).
    UserEntity creator = userRepository.findByLogin(security.getSubject());
    ConceptSet incoming = conversionService.convert(conceptSetDTO, ConceptSet.class);
    ConceptSet created = new ConceptSet();
    created.setCreatedBy(creator);
    created.setCreatedDate(new Date());
    created.setTags(null);
    updateConceptSet(created, incoming);
    return conversionService.convert(created, ConceptSetDTO.class);
}
/**
* Creates a concept set name, based on the selected concept set ID,
* that is used when generating a copy of an existing concept set. This
* function is generally used in conjunction with the copy endpoint to
* create a unique name and then save a copy of an existing concept set.
*
 * @summary Get concept set name suggestion for copying
* @param id The concept set ID
* @return A map of the new concept set name and the existing concept set
* name
*/
@GET
@Path("/{id}/copy-name")
@Produces(MediaType.APPLICATION_JSON)
public Map<String, String> getNameForCopy(@PathParam("id") final int id) {
    // Derive a unique "copy of" name from the existing concept set's name.
    ConceptSetDTO existing = getConceptSet(id);
    String suggestedName = NameUtils.getNameForCopy(existing.getName(), this::getNamesLike, getConceptSetRepository().findByName(existing.getName()));
    return Collections.singletonMap(COPY_NAME, suggestedName);
}
/** Returns the names of all concept sets whose name starts with {@code copyName}. */
public List<String> getNamesLike(String copyName) {
    List<String> names = new ArrayList<>();
    for (ConceptSet conceptSet : getConceptSetRepository().findAllByNameStartsWith(copyName)) {
        names.add(conceptSet.getName());
    }
    return names;
}
/**
 * Updates the concept set for the selected concept set.
 *
 * The concept set has two parts: 1) the elements of the ConceptSetDTO that
 * consist of the identifier, name, etc. 2) the concept set items which
 * contain the concepts and their mapping (i.e. include descendants).
 *
 * @summary Update concept set
 * @param id The concept set identifier
 * @param conceptSetDTO The concept set header
 * @return The updated concept set
 * @throws Exception when no concept set with the given id exists
 */
@Path("/{id}")
@PUT
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Transactional
@CacheEvict(cacheNames = CachingSetup.CONCEPT_SET_LIST_CACHE, allEntries = true)
public ConceptSetDTO updateConceptSet(@PathParam("id") final int id, ConceptSetDTO conceptSetDTO) throws Exception {
    ConceptSet updated = getConceptSetRepository().findById(id);
    if (updated == null) {
        throw new Exception("Concept Set does not exist.");
    }
    // Snapshot the current state as a new version before overwriting it.
    saveVersion(id);
    ConceptSet conceptSet = conversionService.convert(conceptSetDTO, ConceptSet.class);
    return conversionService.convert(updateConceptSet(updated, conceptSet), ConceptSetDTO.class);
}
/** Copies the editable fields from src onto dst, stamps modification metadata, and persists. */
private ConceptSet updateConceptSet(ConceptSet dst, ConceptSet src) {
    UserEntity currentUser = userRepository.findByLogin(security.getSubject());
    dst.setName(src.getName());
    dst.setDescription(src.getDescription());
    dst.setModifiedDate(new Date());
    dst.setModifiedBy(currentUser);
    return this.getConceptSetRepository().save(dst);
}
// Assembles everything needed to export one concept set: its name, expression,
// resolved (included) concepts and mapped concepts from the given vocabulary source.
private ConceptSetExport getConceptSetForExport(int conceptSetId, SourceInfo vocabSource) {
    ConceptSetExport cs = new ConceptSetExport();
    // Set the concept set id
    cs.ConceptSetId = conceptSetId;
    // Get the concept set information
    cs.ConceptSetName = this.getConceptSet(conceptSetId).getName();
    // Get the concept set expression
    cs.csExpression = this.getConceptSetExpression(conceptSetId);
    // Lookup the identifiers
    cs.identifierConcepts = vocabService.executeIncludedConceptLookup(vocabSource.sourceKey, cs.csExpression);
    // Lookup the mapped items
    cs.mappedConcepts = vocabService.executeMappedLookup(vocabSource.sourceKey, cs.csExpression);
    return cs;
}
/**
* Get the concept set generation information for the selected concept
* set ID. This function only works with the configuration of the CEM
* data source.
*
* @link https://github.com/OHDSI/CommonEvidenceModel/wiki
*
* @summary Get concept set generation info
* @param id The concept set identifier.
* @return A collection of concept set generation info objects
*/
@GET
@Path("{id}/generationinfo")
@Produces(MediaType.APPLICATION_JSON)
public Iterable<ConceptSetGenerationInfo> getConceptSetGenerationInfo(@PathParam("id") final int id) {
    // Pure repository delegation; only meaningful when a CEM data source is configured.
    return this.conceptSetGenerationInfoRepository.findAllByConceptSetId(id);
}
/**
* Delete the selected concept set by concept set identifier
*
* @summary Delete concept set
* @param id The concept set ID
*/
@DELETE
@Transactional(rollbackOn = Exception.class, dontRollbackOn = EmptyResultDataAccessException.class)
@Path("{id}")
@CacheEvict(cacheNames = CachingSetup.CONCEPT_SET_LIST_CACHE, allEntries = true)
public void deleteConceptSet(@PathParam("id") final int id) {
    // The previous `catch (Exception e) { throw e; }` blocks were no-ops and have
    // been removed; any other exception still propagates and rolls the transaction back.
    // Remove any generation info; a concept set may legitimately have none.
    try {
        this.conceptSetGenerationInfoRepository.deleteByConceptSetId(id);
    } catch (EmptyResultDataAccessException e) {
        log.warn("Failed to delete Generation Info by ConceptSet with ID = {}, {}", id, e);
    }
    // Remove the concept set items
    try {
        getConceptSetItemRepository().deleteByConceptSetId(id);
    } catch (EmptyResultDataAccessException e) {
        log.warn("Failed to delete ConceptSet items with ID = {}, {}", id, e);
    }
    // Remove the concept set itself
    try {
        getConceptSetRepository().delete(id);
    } catch (EmptyResultDataAccessException e) {
        log.warn("Failed to delete ConceptSet with ID = {}, {}", id, e);
    }
}
/**
 * Assign tag to Concept Set
 *
 * @summary Assign concept set tag
 * @since v2.10.0
 * @param id The concept set ID
 * @param tagId The tag ID
 */
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/tag/")
@Transactional
@CacheEvict(cacheNames = CachingSetup.CONCEPT_SET_LIST_CACHE, allEntries = true)
public void assignTag(@PathParam("id") final Integer id, final int tagId) {
    // Resolve the concept set entity first, then hand it to the shared tag helper.
    ConceptSet conceptSet = getConceptSetRepository().findById(id);
    assignTag(conceptSet, tagId);
}
/**
 * Unassign tag from Concept Set
 *
 * @summary Remove tag from concept set
 * @since v2.10.0
 * @param id The concept set ID
 * @param tagId The tag ID
 */
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/tag/{tagId}")
@Transactional
@CacheEvict(cacheNames = CachingSetup.CONCEPT_SET_LIST_CACHE, allEntries = true)
public void unassignTag(@PathParam("id") final Integer id, @PathParam("tagId") final int tagId) {
    // Resolve the concept set entity first, then hand it to the shared tag helper.
    ConceptSet conceptSet = getConceptSetRepository().findById(id);
    unassignTag(conceptSet, tagId);
}
/**
 * Assign protected tag to Concept Set
 *
 * @summary Assign protected concept set tag
 * @since v2.10.0
 * @param id The concept set ID
 * @param tagId The tag ID
 */
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/protectedtag/")
@Transactional
@CacheEvict(cacheNames = CachingSetup.CONCEPT_SET_LIST_CACHE, allEntries = true)
public void assignPermissionProtectedTag(@PathParam("id") final int id, final int tagId) {
    // @CacheEvict added for consistency with assignTag and unassignPermissionProtectedTag:
    // the self-invocation of assignTag below bypasses the Spring proxy, so the eviction
    // declared on assignTag never fires for this endpoint.
    assignTag(id, tagId);
}
/**
 * Unassign protected tag from Concept Set
 *
 * @summary Remove protected concept set tag
 * @since v2.10.0
 * @param id The concept set ID
 * @param tagId The tag ID
 */
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/protectedtag/{tagId}")
@Transactional
@CacheEvict(cacheNames = CachingSetup.CONCEPT_SET_LIST_CACHE, allEntries = true)
public void unassignPermissionProtectedTag(@PathParam("id") final int id, @PathParam("tagId") final int tagId) {
// Delegates to the regular unassign endpoint logic; cache eviction is handled by the
// @CacheEvict on this method (the inner call bypasses the proxy).
unassignTag(id, tagId);
}
/**
 * Checks a concept set for diagnostic problems. At this time,
 * this appears to be an endpoint used to check to see which tags
 * are applied to a concept set.
 *
 * @summary Concept set tag check
 * @since v2.10.0
 * @param conceptSetDTO The concept set
 * @return A check result
 */
@POST
@Path("/check")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Transactional
public CheckResult runDiagnostics(ConceptSetDTO conceptSetDTO) {
// Runs the configured checker against the submitted DTO and wraps its warnings.
return new CheckResult(checker.check(conceptSetDTO));
}
/**
 * Get a list of versions of the selected concept set
 *
 * @summary Get concept set version list
 * @since v2.10.0
 * @param id The concept set ID
 * @return A list of version information
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/")
@Transactional
public List<VersionDTO> getVersions(@PathParam("id") final int id) {
    // Convert each stored version record into its transport representation.
    return versionService.getVersions(VersionType.CONCEPT_SET, id)
            .stream()
            .map(versionBase -> conversionService.convert(versionBase, VersionDTO.class))
            .collect(Collectors.toList());
}
/**
 * Get a specific version of a concept set
 *
 * @summary Get concept set by version
 * @since v2.10.0
 * @param id The concept set ID
 * @param version The version ID
 * @return The concept set for the selected version
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/{version}")
@Transactional
public ConceptSetVersionFullDTO getVersion(@PathParam("id") final int id, @PathParam("version") final int version) {
    // Read path: the third argument disables the stricter check applied to mutating
    // endpoints (presumably an ownership/write check — see checkVersion).
    checkVersion(id, version, false);
    ConceptSetVersion versionEntity = versionService.getById(VersionType.CONCEPT_SET, id, version);
    return conversionService.convert(versionEntity, ConceptSetVersionFullDTO.class);
}
/**
 * Update a specific version of a selected concept set
 *
 * @summary Update a concept set version
 * @since v2.10.0
 * @param id The concept set ID
 * @param version The version ID
 * @param updateDTO The version update
 * @return The version information
 */
@PUT
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/{version}")
@Transactional
public VersionDTO updateVersion(@PathParam("id") final int id, @PathParam("version") final int version,
VersionUpdateDTO updateDTO) {
    checkVersion(id, version);
    // Bind the path parameters onto the payload before handing it to the version service.
    updateDTO.setVersion(version);
    updateDTO.setAssetId(id);
    ConceptSetVersion savedVersion = versionService.update(VersionType.CONCEPT_SET, updateDTO);
    return conversionService.convert(savedVersion, VersionDTO.class);
}
/**
 * Delete a version of a concept set
 *
 * @summary Delete a concept set version
 * @since v2.10.0
 * @param id The concept set ID
 * @param version The version ID
 */
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/{version}")
@Transactional
public void deleteVersion(@PathParam("id") final int id, @PathParam("version") final int version) {
// checkVersion (without the read-only flag) validates the version and, presumably,
// the caller's right to modify it before the delete is issued.
checkVersion(id, version);
versionService.delete(VersionType.CONCEPT_SET, id, version);
}
/**
* Create a new asset from a specific version of the selected
* concept set
*
* @summary Create a concept set copy from a specific concept set version
* @since v2.10.0
* @param id The concept set ID
* @param version The version ID
* @return The concept set copy
*/
@PUT
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/version/{version}/createAsset")
@Transactional
@CacheEvict(cacheNames = CachingSetup.CONCEPT_SET_LIST_CACHE, allEntries = true)
public ConceptSetDTO copyAssetFromVersion(@PathParam("id") final int id, @PathParam("version") final int version) {
checkVersion(id, version, false);
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | true |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/CDMResultsService.java | src/main/java/org/ohdsi/webapi/service/CDMResultsService.java | package org.ohdsi.webapi.service;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.achilles.aspect.AchillesCache;
import org.ohdsi.webapi.achilles.service.AchillesCacheService;
import org.ohdsi.webapi.cdmresults.AchillesCacheTasklet;
import org.ohdsi.webapi.cdmresults.CDMResultsCacheTasklet;
import org.ohdsi.webapi.cdmresults.DescendantRecordAndPersonCount;
import org.ohdsi.webapi.cdmresults.DescendantRecordCount;
import org.ohdsi.webapi.cdmresults.domain.CDMCacheEntity;
import org.ohdsi.webapi.cdmresults.service.CDMCacheService;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.report.CDMDashboard;
import org.ohdsi.webapi.report.CDMDataDensity;
import org.ohdsi.webapi.report.CDMDeath;
import org.ohdsi.webapi.report.CDMObservationPeriod;
import org.ohdsi.webapi.report.CDMPersonSummary;
import org.ohdsi.webapi.report.CDMResultsAnalysisRunner;
import org.ohdsi.webapi.shiro.management.datasource.SourceAccessor;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.util.SourceUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.job.SimpleJob;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.DependsOn;
import org.springframework.core.convert.ConversionService;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.Consumes;
import javax.ws.rs.ForbiddenException;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import static org.ohdsi.webapi.Constants.Params.JOB_START_TIME;
import static org.ohdsi.webapi.cdmresults.AchillesCacheTasklet.DASHBOARD;
import static org.ohdsi.webapi.cdmresults.AchillesCacheTasklet.DATA_DENSITY;
import static org.ohdsi.webapi.cdmresults.AchillesCacheTasklet.DEATH;
import static org.ohdsi.webapi.cdmresults.AchillesCacheTasklet.DRILLDOWN;
import static org.ohdsi.webapi.cdmresults.AchillesCacheTasklet.OBSERVATION_PERIOD;
import static org.ohdsi.webapi.cdmresults.AchillesCacheTasklet.PERSON;
import static org.ohdsi.webapi.cdmresults.AchillesCacheTasklet.TREEMAP;
/**
* @author fdefalco
*/
@Path("/cdmresults")
@Component
@DependsOn({"jobInvalidator", "flyway"})
public class CDMResultsService extends AbstractDaoService implements InitializingBean {
private final Logger logger = LoggerFactory.getLogger(CDMResultsService.class);
// NOTE(review): these two SQL resource paths are not referenced anywhere in this
// class's visible code — verify whether they are still needed.
private static final String CONCEPT_COUNT_SQL = "/resources/cdmresults/sql/getConceptRecordCount.sql";
private static final String CONCEPT_COUNT_PERSON_SQL = "/resources/cdmresults/sql/getConceptRecordPersonCount.sql";
// Executes the Achilles/CDM report queries; initialized in afterPropertiesSet().
@Autowired
private CDMResultsAnalysisRunner queryRunner;
@Autowired
private JobService jobService;
@Autowired
private JobBuilderFactory jobBuilders;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
private SourceService sourceService;
@Autowired
private SourceAccessor sourceAccessor;
@Autowired
private ObjectMapper objectMapper;
@Autowired
private JobRepository jobRepository;
// Feature toggles for the various cache-warming paths (startup, cron, Achilles reports).
@Value("${cdm.result.cache.warming.enable}")
private boolean cdmResultCacheWarmingEnable;
@Value("${cdm.cache.cron.warming.enable}")
private boolean cdmCacheCronWarmingEnable;
@Value("${cdm.cache.achilles.warming.enable}")
private boolean cdmAchillesCacheWarmingEnable;
// When true, concept counts are reported as record+person counts.
@Value("${cache.achilles.usePersonCount:false}")
private boolean usePersonCount;
// Number of parallel warming jobs the sources are partitioned into (see getBucketSizes).
@Value("${cache.jobs.count:3}")
private int cacheJobsCount;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private AchillesCacheService cacheService;
@Autowired
private CDMCacheService cdmCacheService;
@Autowired
private ConversionService conversionService;
@Override
public void afterPropertiesSet() throws Exception {
// Initialize the report runner for the configured source dialect, then warm caches
// at startup (warmCaches() is a no-op when cdm.result.cache.warming.enable is false).
queryRunner.init(this.getSourceDialect(), objectMapper);
warmCaches();
}
/**
 * Periodically re-warms the caches according to the configured cron expression.
 * Does nothing unless {@code cdm.cache.cron.warming.enable} is set.
 */
@Scheduled(cron = "${cdm.cache.cron.expression}")
public void scheduledWarmCaches() {
    if (!cdmCacheCronWarmingEnable) {
        return;
    }
    warmCaches();
}
/**
 * Get the record count and descendant record count for one or more concepts in a single CDM database.
 *
 * <p>
 * This POST request accepts a json array containing one or more concept IDs. (e.g. [201826, 437827])
 * </p>
 *
 * @param sourceKey The unique identifier for a CDM source (e.g. SYNPUF5PCT)
 *
 * @return A javascript object with one element per concept. Each element is an array of length two containing the
 * record count and descendant record count for the concept.
 *
 * <p>
 * [
 * {
 * "201826": [
 * 612861,
 * 653173
 * ]
 * },
 * {
 * "437827": [
 * 224421,
 * 224421
 * ]
 * }
 * ]
 * </p>
 * For concept id "201826" in the SYNPUF5PCT data source the record count is 612861 and the descendant record count is 653173.
 */
@Path("{sourceKey}/conceptRecordCount")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public List<SimpleEntry<Integer, List<Long>>> getConceptRecordCount(@PathParam("sourceKey") String sourceKey, List<Integer> identifiers) {
    Source source = sourceService.findBySourceKey(sourceKey);
    // Unknown source key -> empty result rather than an error.
    if (source == null) {
        return Collections.emptyList();
    }
    List<CDMCacheEntity> cachedEntities = cdmCacheService.findAndCache(source, identifiers);
    // The usePersonCount toggle selects whether person counts accompany record counts.
    List<DescendantRecordCount> recordCounts = cachedEntities.stream()
            .map(cacheEntity -> usePersonCount
                    ? conversionService.convert(cacheEntity, DescendantRecordAndPersonCount.class)
                    : conversionService.convert(cacheEntity, DescendantRecordCount.class))
            .collect(Collectors.toList());
    return convertToResponse(recordCounts);
}
/**
 * Flattens the record-count objects into (conceptId, [counts...]) entries for the response.
 */
private List<SimpleEntry<Integer, List<Long>>> convertToResponse(Collection<DescendantRecordCount> conceptRecordCounts) {
    List<SimpleEntry<Integer, List<Long>>> response = new ArrayList<>(conceptRecordCounts.size());
    for (DescendantRecordCount recordCount : conceptRecordCounts) {
        response.add(new SimpleEntry<>(recordCount.getId(), recordCount.getValues()));
    }
    return response;
}
/**
 * Queries for dashboard report for the sourceKey
 *
 * @param sourceKey The source key
 * @return CDMDashboard
 */
@GET
@Path("{sourceKey}/dashboard")
@Produces(MediaType.APPLICATION_JSON)
@AchillesCache(DASHBOARD)
public CDMDashboard getDashboard(@PathParam("sourceKey")
final String sourceKey) {
// The @AchillesCache aspect presumably serves the cached payload and only falls
// through to this raw computation on a miss — verify aspect semantics.
return getRawDashboard(sourceKey);
}
// Uncached variant; also invoked directly by the cache-warming tasklet (presumably).
public CDMDashboard getRawDashboard(final String sourceKey) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
return queryRunner.getDashboard(getSourceJdbcTemplate(source), source);
}
/**
 * Queries for person report for the sourceKey
 *
 * @param sourceKey The source key
 * @return CDMPersonSummary
 */
@GET
@Path("{sourceKey}/person")
@Produces(MediaType.APPLICATION_JSON)
@AchillesCache(PERSON)
public CDMPersonSummary getPerson(@PathParam("sourceKey") final String sourceKey) {
return getRawPerson(sourceKey);
}
// Uncached variant of getPerson.
public CDMPersonSummary getRawPerson(String sourceKey) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
return this.queryRunner.getPersonResults(this.getSourceJdbcTemplate(source), source);
}
/**
 * Warm the results cache for a selected source
 *
 * @summary Warm cache for source key
 * @param sourceKey The source key
 * @return The job execution information
 */
@GET
@Path("{sourceKey}/warmCache")
@Produces(MediaType.APPLICATION_JSON)
public JobExecutionResource warmCache(@PathParam("sourceKey") final String sourceKey) {
// Thin endpoint wrapper; the duplicate-job check lives in warmCacheByKey.
return this.warmCacheByKey(sourceKey);
}
/**
 * Refresh the results cache for a selected source. Requires security to be enabled
 * and an admin caller with access to the source; otherwise (or when no Results
 * daimon exists) an empty JobExecutionResource is returned. If a warming job is
 * already registered, its execution resource is returned instead of starting a new one.
 *
 * @summary Refresh results cache
 * @param sourceKey The source key
 * @return The job execution resource
 */
@GET
@Path("{sourceKey}/refreshCache")
@Produces(MediaType.APPLICATION_JSON)
public JobExecutionResource refreshCache(@PathParam("sourceKey") final String sourceKey) {
    if (!isSecured() || !isAdmin()) {
        return new JobExecutionResource();
    }
    Source source = getSourceRepository().findBySourceKey(sourceKey);
    if (!sourceAccessor.hasAccess(source)) {
        return new JobExecutionResource();
    }
    JobExecutionResource existingJob = jobService.findJobByName(Constants.WARM_CACHE,
            getWarmCacheJobName(String.valueOf(source.getSourceId()), sourceKey));
    if (existingJob != null) {
        return existingJob;
    }
    boolean hasResultsDaimon = source.getDaimons().stream()
            .anyMatch(sd -> Objects.equals(sd.getDaimonType(), SourceDaimon.DaimonType.Results));
    if (hasResultsDaimon) {
        return warmCacheByKey(source.getSourceKey());
    }
    return new JobExecutionResource();
}
/**
 * Clear the cdm_cache and achilles_cache for the given source
 *
 * @summary Clear the cdm_cache and achilles_cache for one source
 * @param sourceKey The source key
 * @throws ForbiddenException if the user is not an admin
 */
@POST
@Path("{sourceKey}/clearCache")
@Transactional()
public void clearCacheForSource(@PathParam("sourceKey") final String sourceKey) {
// Admin-only: both security enabled and admin role are required.
if (!isSecured() || !isAdmin()) {
throw new ForbiddenException();
}
Source source = getSourceRepository().findBySourceKey(sourceKey);
cacheService.clearCache(source);
cdmCacheService.clearCache(source);
}
/**
 * Clear the cdm_cache and achilles_cache for all sources
 *
 * @summary Clear the cdm_cache and achilles_cache for all sources
 * @throws ForbiddenException if the user is not an admin
 */
@POST
@Path("clearCache")
@Transactional()
public void clearCache() {
if (!isSecured() || !isAdmin()) {
throw new ForbiddenException();
}
cacheService.clearCache();
cdmCacheService.clearCache();
}
/**
 * Queries for data density report for the given sourceKey
 *
 * @param sourceKey The source key
 * @return CDMDataDensity
 */
@GET
@Path("{sourceKey}/datadensity")
@Produces(MediaType.APPLICATION_JSON)
@AchillesCache(DATA_DENSITY)
public CDMDataDensity getDataDensity(@PathParam("sourceKey") final String sourceKey) {
return getRawDataDesity(sourceKey);
}
// NOTE(review): method name misspells "Density"; left unchanged because it is public
// and may be called from warming/caching code elsewhere — verify before renaming.
public CDMDataDensity getRawDataDesity(String sourceKey) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
return this.queryRunner.getDataDensityResults(this.getSourceJdbcTemplate(source), source);
}
/**
 * Queries for death report for the given sourceKey
 *
 * @param sourceKey The source key
 * @return CDMDeath
 */
@GET
@Path("{sourceKey}/death")
@Produces(MediaType.APPLICATION_JSON)
@AchillesCache(DEATH)
public CDMDeath getDeath(@PathParam("sourceKey") final String sourceKey) {
return getRawDeath(sourceKey);
}
// Uncached variant of getDeath.
public CDMDeath getRawDeath(String sourceKey) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
return this.queryRunner.getDeathResults(this.getSourceJdbcTemplate(source), source);
}
/**
 * Queries for observation period report for the given sourceKey
 *
 * @param sourceKey The source key
 * @return CDMObservationPeriod
 */
@GET
@Path("{sourceKey}/observationPeriod")
@Produces(MediaType.APPLICATION_JSON)
@AchillesCache(OBSERVATION_PERIOD)
public CDMObservationPeriod getObservationPeriod(@PathParam("sourceKey") final String sourceKey) {
return getRawObservationPeriod(sourceKey);
}
// Uncached variant of getObservationPeriod.
public CDMObservationPeriod getRawObservationPeriod(String sourceKey) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
return this.queryRunner.getObservationPeriodResults(this.getSourceJdbcTemplate(source), source);
}
/**
 * Queries for domain treemap results
 *
 * @param domain The report domain (path segment)
 * @param sourceKey The source key
 * @return ArrayNode of treemap results
 */
@GET
@Path("{sourceKey}/{domain}/")
@Produces(MediaType.APPLICATION_JSON)
@AchillesCache(TREEMAP)
public ArrayNode getTreemap(
@PathParam("domain")
final String domain,
@PathParam("sourceKey")
final String sourceKey) {
return getRawTreeMap(domain, sourceKey);
}
// Uncached variant of getTreemap.
public ArrayNode getRawTreeMap(String domain, String sourceKey) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
return queryRunner.getTreemap(this.getSourceJdbcTemplate(source), domain, source);
}
/**
 * Queries for drilldown results
 *
 * @param domain The domain for the drilldown
 * @param conceptId The concept ID
 * @param sourceKey The source key
 * @return The JSON results
 */
@GET
@Path("{sourceKey}/{domain}/{conceptId}")
@Produces(MediaType.APPLICATION_JSON)
@AchillesCache(DRILLDOWN)
public JsonNode getDrilldown(@PathParam("domain")
final String domain,
@PathParam("conceptId")
final int conceptId,
@PathParam("sourceKey")
final String sourceKey) {
return getRawDrilldown(domain, conceptId, sourceKey);
}
// Uncached variant of getDrilldown.
public JsonNode getRawDrilldown(String domain, int conceptId, String sourceKey) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
JdbcTemplate jdbcTemplate = this.getSourceJdbcTemplate(source);
return queryRunner.getDrilldown(jdbcTemplate, domain, conceptId, source);
}
/**
 * Starts a warming job for a single source unless one with the same name is already
 * registered, in which case an empty JobExecutionResource is returned.
 */
private JobExecutionResource warmCacheByKey(String sourceKey) {
    Source source = getSourceRepository().findBySourceKey(sourceKey);
    // Compute the job name once instead of twice (previously duplicated inline).
    String jobName = getWarmCacheJobName(String.valueOf(source.getSourceId()), sourceKey);
    if (jobService.findJobByName(jobName, jobName) != null) {
        // A warming job for this source already exists.
        return new JobExecutionResource();
    }
    return warmCaches(source);
}
/**
 * Warms caches for every configured source, then logs (at INFO) the sources that were
 * skipped because they lack a Vocabulary or Results daimon.
 */
private void warmCaches() {
    Collection<Source> sources = sourceService.getSources();
    warmCaches(sources);
    if (logger.isInfoEnabled()) {
        List<String> skippedSourceNames = sources.stream()
                .filter(source -> !SourceUtils.hasSourceDaimon(source, SourceDaimon.DaimonType.Vocabulary)
                        || !SourceUtils.hasSourceDaimon(source, SourceDaimon.DaimonType.Results))
                .map(Source::getSourceName)
                .collect(Collectors.toList());
        if (!skippedSourceNames.isEmpty()) {
            logger.info("Following sources do not have Vocabulary or Result schema and will not be cached: {}",
                    String.join(", ", skippedSourceNames));
        }
    }
}
/*
 * Warm cache for a single source
 */
private JobExecutionResource warmCaches(Source source) {
if (!cdmResultCacheWarmingEnable) {
logger.info("Cache warming is disabled for CDM results");
return new JobExecutionResource();
}
// Both Vocabulary and Results daimons are required to warm anything for this source.
if (!SourceUtils.hasSourceDaimon(source, SourceDaimon.DaimonType.Vocabulary) || !SourceUtils.hasSourceDaimon(source, SourceDaimon.DaimonType.Results)) {
logger.info("Cache wouldn't be applied to sources without Vocabulary and Result schemas, source [{}] was omitted", source.getSourceName());
return new JobExecutionResource();
}
int resultDaimonPriority = getResultsDaimonPriority(source);
// With Achilles warming disabled AND a non-positive Results priority there would be no job steps at all.
if (!cdmAchillesCacheWarmingEnable && resultDaimonPriority <= 0) {
logger.info("Cache wouldn't be applied to sources with result daimon priority <= 0 AND when the Achilles cache is disabled, source [{}] was omitted", source.getSourceName());
return new JobExecutionResource();
}
String jobName = getWarmCacheJobName(String.valueOf(source.getSourceId()), source.getSourceKey());
List<Step> jobSteps = createCacheWarmingJobSteps(source, jobName);
// NOTE(review): createJob returns null when a job with this name is already registered;
// runJob would then NPE on builder.build(). Callers check for an existing job first,
// but a race remains — verify whether a null guard is needed here.
SimpleJobBuilder builder = createJob(jobName, jobSteps);
return runJob(source.getSourceKey(), source.getSourceId(), jobName, builder);
}
/*
 * Warm cache for a collection of sources. Eligible sources (Vocabulary + Results
 * daimons, caching enabled) are partitioned into cacheJobsCount roughly equal
 * buckets; each bucket's steps are assembled into one composite batch job.
 */
private void warmCaches(Collection<Source> sources) {
    if (!cdmResultCacheWarmingEnable) {
        logger.info("Cache warming is disabled for CDM results");
        return;
    }
    List<Source> vocabularySources = sources.stream()
            .filter(s -> SourceUtils.hasSourceDaimon(s, SourceDaimon.DaimonType.Vocabulary)
                    && SourceUtils.hasSourceDaimon(s, SourceDaimon.DaimonType.Results)
                    && s.isIsCacheEnabled())
            .collect(Collectors.toList());
    long[] bucketSizes = getBucketSizes(vocabularySources);
    int bucketIndex = 0, counter = 0;
    List<Integer> sourceIds = new ArrayList<>();
    List<String> sourceKeys = new ArrayList<>();
    List<Step> allJobSteps = new ArrayList<>();
    for (Source source : vocabularySources) {
        sourceIds.add(source.getSourceId());
        sourceKeys.add(source.getSourceKey());
        String jobName = getWarmCacheJobName(String.valueOf(source.getSourceId()), source.getSourceKey());
        // Check whether cache job for current source already exists
        if (jobService.findJobByName(jobName, jobName) == null) {
            // Create the job step
            List<Step> jobSteps = createCacheWarmingJobSteps(source, jobName);
            // get priority of the results daimon
            int priority = getResultsDaimonPriority(source);
            // if source has results daimon with high priority - put it at the beginning of the queue
            if (priority > 0) {
                allJobSteps.addAll(0, jobSteps);
            } else {
                allJobSteps.addAll(jobSteps);
            }
        }
        // Bucket boundary reached: launch one composite job for the accumulated steps.
        if (counter++ >= bucketSizes[bucketIndex] - 1) {
            if (!allJobSteps.isEmpty()) {
                String compositeJobName = getWarmCacheJobName(sourceIds.stream().map(String::valueOf)
                        .collect(Collectors.joining(",")), String.join(",", sourceKeys));
                SimpleJobBuilder builder = createJob(compositeJobName, allJobSteps);
                // BUGFIX: register the run under the composite job name (previously the last
                // per-source jobName was passed, mislabelling the batch job). Also guard
                // against createJob returning null (job already registered), which would
                // have caused an NPE inside runJob.
                if (builder != null) {
                    runJob(source.getSourceKey(), source.getSourceId(), compositeJobName, builder);
                }
            }
            bucketIndex++;
            counter = 0;
            sourceIds.clear();
            sourceKeys.clear();
            allJobSteps.clear();
        }
    }
}
/**
 * Chains the given steps into a job builder. Returns null when a job with this name
 * is already registered or when there are no steps.
 */
private SimpleJobBuilder createJob(String jobName, List<Step> steps) {
    SimpleJobBuilder builder = null;
    if (jobService.findJobByName(jobName, jobName) == null && !steps.isEmpty()) {
        JobBuilder jobBuilder = jobBuilders.get(jobName);
        for (Step step : steps) {
            // First step starts the job; subsequent steps are appended in order.
            builder = (builder == null) ? jobBuilder.start(step) : builder.next(step);
        }
    }
    return builder;
}
/*
 * Runs the job and returns the JobExecutionResource
 */
private JobExecutionResource runJob(String sourceKey, int sourceId, String jobName, SimpleJobBuilder stepBuilder) {
return jobService.runJob(stepBuilder.build(), new JobParametersBuilder()
.addString(Constants.Params.JOB_NAME, jobName)
// batch_job_execution_params.string_val is varchar(250). too many source keys can exceed 250 symbols
.addString(Constants.Params.SOURCE_KEY, StringUtils.substring(sourceKey, 0, 250))
.addString(Constants.Params.SOURCE_ID, String.valueOf(sourceId))
.toJobParameters());
}
/**
 * Builds the warming steps for one source: an Achilles report step (when enabled)
 * and a concept-count step (when the Results daimon has a positive priority).
 * The steps are assembled into an actual job later via createJob().
 */
private List<Step> createCacheWarmingJobSteps(Source source, String jobName) {
    // The SimpleJob previously instantiated here was never used (dead code) and has been removed.
    int resultDaimonPriority = getResultsDaimonPriority(source);
    List<Step> steps = new ArrayList<>();
    if (cdmAchillesCacheWarmingEnable) {
        steps.add(getAchillesStep(source, jobName));
    }
    if (resultDaimonPriority > 0) {
        steps.add(getCountStep(source, jobName));
    }
    return steps;
}
private Step getAchillesStep(Source source, String jobStepName) {
// The service is fetched through the application context rather than using `this`,
// presumably so the tasklet's callbacks go through the Spring proxy and the
// @AchillesCache aspect applies — verify.
CDMResultsService instance = applicationContext.getBean(CDMResultsService.class);
AchillesCacheTasklet achillesTasklet = new AchillesCacheTasklet(source, instance, cacheService,
queryRunner, objectMapper);
return stepBuilderFactory.get(jobStepName + " achilles")
.tasklet(achillesTasklet)
.build();
}
// Builds the concept-record-count warming step for one source.
private Step getCountStep(Source source, String jobStepName) {
CDMResultsCacheTasklet countTasklet = new CDMResultsCacheTasklet(source, cdmCacheService);
return stepBuilderFactory.get(jobStepName + " counts")
.tasklet(countTasklet)
.build();
}
/**
 * Returns a positive priority of the source's Results daimon, or 0 when the source
 * has no Results daimon with a positive priority.
 */
private int getResultsDaimonPriority(Source source) {
    for (SourceDaimon daimon : source.getDaimons()) {
        if (daimon.getDaimonType().equals(SourceDaimon.DaimonType.Results) && daimon.getPriority() > 0) {
            return daimon.getPriority();
        }
    }
    return 0;
}
private String getWarmCacheJobName(String sourceIds, String sourceKeys) {
    return getJobName("warming cache", sourceIds, sourceKeys);
}
/**
 * Composes a job name that fits into batch_job_instance.job_name (varchar(100)):
 * first from source keys, then from source ids, finally truncated with a suffix.
 */
private String getJobName(String jobType, String sourceIds, String sourceKeys) {
    // for multiple sources: try to compose a job name from source keys, and if it is too long - use source ids
    String jobName = String.format("%s: %s", jobType, sourceKeys);
    if (jobName.length() >= 100) { // job name in batch_job_instance is varchar(100)
        jobName = String.format("%s: %s", jobType, sourceIds);
        if (jobName.length() >= 100) { // if we still have more than 100 symbols
            jobName = jobName.substring(0, 88);
            // BUGFIX: substring(0, lastIndexOf(',')) threw StringIndexOutOfBoundsException
            // when the 88-char prefix contained no comma; fall back to the bare prefix.
            int lastComma = jobName.lastIndexOf(',');
            if (lastComma >= 0) {
                jobName = jobName.substring(0, lastComma);
            }
            jobName = jobName.concat(" and more..."); // todo: this is quick fix. need better solution
        }
    }
    return jobName;
}
/**
 * Splits the source list into cacheJobsCount buckets of approximately equal size;
 * the last bucket absorbs whatever remains.
 */
private long[] getBucketSizes(List<Source> vocabularySources) {
    long[] bucketSizes = new long[cacheJobsCount];
    long remaining = vocabularySources.size();
    for (int i = 0; i < cacheJobsCount; i++) {
        int jobsLeft = cacheJobsCount - i;
        long bucketSize = (jobsLeft > 1)
                ? Math.round(Math.floor(remaining * 1.0 / jobsLeft))
                : remaining;
        bucketSizes[i] = bucketSize;
        remaining -= bucketSize;
    }
    return bucketSizes;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java | src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java | package org.ohdsi.webapi.service;
import com.odysseusinc.arachne.commons.types.DBMSType;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.DataSourceUnsecuredDTO;
import com.odysseusinc.datasourcemanager.krblogin.KerberosService;
import com.odysseusinc.datasourcemanager.krblogin.KrbConfig;
import com.odysseusinc.datasourcemanager.krblogin.RuntimeServiceMode;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authz.UnauthorizedException;
import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterization;
import org.ohdsi.analysis.pathway.design.PathwayAnalysis;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.IExecutionInfo;
import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.common.sensitiveinfo.AbstractAdminService;
import org.ohdsi.webapi.conceptset.ConceptSet;
import org.ohdsi.webapi.conceptset.ConceptSetComparison;
import org.ohdsi.webapi.conceptset.ConceptSetItemRepository;
import org.ohdsi.webapi.conceptset.ConceptSetRepository;
import org.ohdsi.webapi.conceptset.annotation.ConceptSetAnnotationRepository;
import org.ohdsi.webapi.exception.BadRequestAtlasException;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysis;
import org.ohdsi.webapi.model.CommonEntity;
import org.ohdsi.webapi.model.CommonEntityExt;
import org.ohdsi.webapi.pathway.domain.PathwayAnalysisEntity;
import org.ohdsi.webapi.reusable.domain.Reusable;
import org.ohdsi.webapi.security.PermissionService;
import org.ohdsi.webapi.service.dto.CommonEntityDTO;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.shiro.management.DisabledSecurity;
import org.ohdsi.webapi.shiro.management.Security;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceHelper;
import org.ohdsi.webapi.source.SourceRepository;
import org.ohdsi.webapi.tag.TagSecurityUtils;
import org.ohdsi.webapi.tag.TagService;
import org.ohdsi.webapi.tag.domain.Tag;
import org.ohdsi.webapi.util.CancelableJdbcTemplate;
import org.ohdsi.webapi.util.DataSourceDTOParser;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.convert.ConversionService;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.ForbiddenException;
import java.io.File;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
public abstract class AbstractDaoService extends AbstractAdminService {
// Logger bound to the concrete subclass for clearer log attribution.
protected final Logger log = LoggerFactory.getLogger(getClass());
// Schema holding WebAPI's own application tables.
@Value("${datasource.ohdsi.schema}")
private String ohdsiSchema;
// SQL dialect of the WebAPI application database.
@Value("${datasource.dialect}")
private String dialect;
// SQL dialect used when talking to CDM sources (presumably; see getSourceJdbcTemplate usage).
@Value("${datasource.dialect.source}")
private String sourceDialect;
@Value("${source.name}")
private String sourceName;
@Value("${cdm.version}")
private String cdmVersion;
// Forwarded to CancelableJdbcTemplate.setSuppressApiException in getSourceJdbcTemplate/executeInTransaction.
@Value("${jdbc.suppressInvalidApiException}")
protected boolean suppressApiException;
@Autowired
private JdbcTemplate jdbcTemplate;
@Autowired
private SourceRepository sourceRepository;
@Autowired
private ConceptSetItemRepository conceptSetItemRepository;
@Autowired
private ConceptSetAnnotationRepository conceptSetAnnotationRepository;
@Autowired
protected Security security;
@Autowired
protected UserRepository userRepository;
public static final List<GenerationStatus> INVALIDATE_STATUSES = new ArrayList<GenerationStatus>() {{
add(GenerationStatus.PENDING);
add(GenerationStatus.RUNNING);
}};
// Simple accessors exposing autowired repositories to subclasses.
public ConceptSetItemRepository getConceptSetItemRepository() {
return conceptSetItemRepository;
}
public ConceptSetAnnotationRepository getConceptSetAnnotationRepository() {
return conceptSetAnnotationRepository;
}
@Autowired
private ConceptSetRepository conceptSetRepository;
public ConceptSetRepository getConceptSetRepository() {
return conceptSetRepository;
}
// Transaction templates with different propagation settings (configured elsewhere,
// presumably by bean name — verify the corresponding @Bean definitions).
@Autowired
private TransactionTemplate transactionTemplate;
@Autowired
private TransactionTemplate transactionTemplateRequiresNew;
@Autowired
private TransactionTemplate transactionTemplateNoTransaction;
// Used for Kerberos login when a source requires it (see loginToKerberos usage below).
@Autowired
private KerberosService kerberosService;
@Autowired
private SourceHelper sourceHelper;
@Autowired
private TagService tagService;
@Autowired
private PermissionService permissionService;
@Autowired
private ConversionService conversionService;
public SourceRepository getSourceRepository() {
return sourceRepository;
}
/**
* @return the dialect
*/
public String getDialect() {
return dialect;
}
/**
* @param dialect the dialect to set
*/
public void setDialect(String dialect) {
this.dialect = dialect;
}
/**
* @return the jdbcTemplate
*/
public JdbcTemplate getJdbcTemplate() {
return jdbcTemplate;
}
public CancelableJdbcTemplate getSourceJdbcTemplate(Source source) {
DriverManagerDataSource dataSource = getDriverManagerDataSource(source);
CancelableJdbcTemplate jdbcTemplate = new CancelableJdbcTemplate(dataSource);
jdbcTemplate.setSuppressApiException(suppressApiException);
return jdbcTemplate;
}
public <T> T executeInTransaction(Source source, Function<JdbcTemplate, TransactionCallback<T>> callbackFunction) {
DriverManagerDataSource dataSource = getDriverManagerDataSource(source);
CancelableJdbcTemplate jdbcTemplate = new CancelableJdbcTemplate(dataSource);
jdbcTemplate.setSuppressApiException(suppressApiException);
DataSourceTransactionManager transactionManager = new DataSourceTransactionManager(dataSource);
TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager);
transactionTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
return transactionTemplate.execute(callbackFunction.apply(jdbcTemplate));
}
private DriverManagerDataSource getDriverManagerDataSource(Source source) {
DataSourceUnsecuredDTO dataSourceData = DataSourceDTOParser.parseDTO(source);
if (dataSourceData.getUseKerberos()) {
loginToKerberos(dataSourceData);
}
DriverManagerDataSource dataSource;
String connectionString = sourceHelper.getSourceConnectionString(source);
if (dataSourceData.getUsername() != null && dataSourceData.getPassword() != null) {
// NOTE: jdbc link should NOT include username and password, because they have higher priority than separate ones
dataSource = new DriverManagerDataSource(
connectionString,
dataSourceData.getUsername(),
dataSourceData.getPassword()
);
} else {
dataSource = new DriverManagerDataSource(connectionString);
}
if (DBMSType.SNOWFLAKE.getValue().equalsIgnoreCase(source.getSourceDialect())) {
if (dataSource.getConnectionProperties() == null) {
dataSource.setConnectionProperties(new Properties());
}
dataSource.getConnectionProperties().setProperty("CLIENT_RESULT_COLUMN_CASE_INSENSITIVE", "true");
}
return dataSource;
}
private void loginToKerberos(DataSourceUnsecuredDTO dataSourceData) {
File temporaryDir = com.google.common.io.Files.createTempDir();
KrbConfig krbConfig = new KrbConfig();
try {
krbConfig = kerberosService.runKinit(dataSourceData, RuntimeServiceMode.SINGLE, temporaryDir);
} catch (RuntimeException | IOException e) {
log.error("Login to kerberos failed", e);
}
try {
FileUtils.forceDelete(temporaryDir);
if (krbConfig.getComponents() != null && StringUtils.isNotBlank(krbConfig.getComponents().getKeytabPath().toString())){
FileUtils.forceDelete(krbConfig.getComponents().getKeytabPath().toFile());
}
} catch (IOException e) {
log.warn(e.getMessage(), e);
}
}
/**
* @return the sourceDialect
*/
public String getSourceDialect() {
return sourceDialect;
}
/**
* @param sourceDialect the sourceDialect to set
*/
public void setSourceDialect(String sourceDialect) {
this.sourceDialect = sourceDialect;
}
/**
* @return the sourceName
*/
public String getSourceName() {
return sourceName;
}
/**
* @param sourceName the sourceName to set
*/
public void setSourceName(String sourceName) {
this.sourceName = sourceName;
}
/**
* @return the cdmVersion
*/
public String getCdmVersion() {
return cdmVersion;
}
/**
* @param cdmVersion the cdmVersion to set
*/
public void setCdmVersion(String cdmVersion) {
this.cdmVersion = cdmVersion;
}
protected List<Map<String, String>> genericResultSetLoader(PreparedStatementRenderer psr, Source source) {
List<Map<String, String>> results = null;
try {
results = getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), new RowMapper<Map<String, String>>() {
@Override
public Map<String, String> mapRow(ResultSet rs, int rowNum)
throws SQLException {
Map<String, String> result = new HashMap<String, String>();
ResultSetMetaData metaData = rs.getMetaData();
int colCount = metaData.getColumnCount();
for (int i = 1; i <= colCount; i++) {
String columnLabel = metaData.getColumnLabel(i);
String columnValue = String.valueOf(rs.getObject(i));
result.put(columnLabel, columnValue);
}
return result;
}
});
} catch (Exception e) {
log.error("Result set loading error", e);
}
return results;
}
/**
* @return the transactionTemplate
*/
public TransactionTemplate getTransactionTemplate() {
return transactionTemplate;
}
/**
* @return the transactionTemplateRequiresNew
*/
public TransactionTemplate getTransactionTemplateRequiresNew() {
return transactionTemplateRequiresNew;
}
/**
* @return the transactionTemplateNoTransaction
*/
public TransactionTemplate getTransactionTemplateNoTransaction() {
return transactionTemplateNoTransaction;
}
/**
* @return the ohdsiSchema
*/
public String getOhdsiSchema() {
return ohdsiSchema;
}
protected IExecutionInfo invalidateExecution(IExecutionInfo executionInfo) {
return executionInfo.setIsValid(false)
.setStatus(GenerationStatus.COMPLETE)
.setMessage("Invalidated by system");
}
protected void invalidateExecutions(List<? extends IExecutionInfo> executionInfoList) {
executionInfoList.forEach(this::invalidateExecution);
}
protected UserEntity getCurrentUser() {
return userRepository.findByLogin(getCurrentUserLogin());
}
protected String getCurrentUserLogin() {
return security.getSubject();
}
protected PermissionService getPermissionService() {
return this.permissionService;
}
protected void assignTag(CommonEntityExt<?> entity, int tagId) {
checkOwnerOrAdminOrGrantedOrTagManager(entity);
if (Objects.nonNull(entity)) {
Tag tag = tagService.getById(tagId);
if (Objects.nonNull(tag)) {
if (tag.isPermissionProtected() && !hasPermissionToAssignProtectedTags(entity, "post")) {
throw new UnauthorizedException(String.format("No permission to assign protected tag '%s' to %s (id=%s).",
tag.getName(), entity.getClass().getSimpleName(), entity.getId()));
}
// unassign tags from the same group if group marked as multi_selection=false
tag.getGroups().stream().findFirst().ifPresent(group -> {
if (!group.isMultiSelection()) {
entity.getTags().forEach(t -> {
if (t.getGroups().stream().anyMatch(g -> g.getId().equals(group.getId()))) {
unassignTag(entity, t.getId());
}
});
}
});
entity.getTags().add(tag);
}
}
}
protected void unassignTag(CommonEntityExt<?> entity, int tagId) {
checkOwnerOrAdminOrGrantedOrTagManager(entity);
if (Objects.nonNull(entity)) {
Tag tag = tagService.getById(tagId);
if (Objects.nonNull(tag)) {
if (tag.isPermissionProtected() && !hasPermissionToAssignProtectedTags(entity, "delete")) {
throw new UnauthorizedException(String.format("No permission to unassign protected tag '%s' from %s (id=%s).",
tag.getName(), entity.getClass().getSimpleName(), entity.getId()));
}
Set<Tag> tags = entity.getTags().stream()
.filter(t -> t.getId() != tagId)
.collect(Collectors.toSet());
entity.setTags(tags);
}
}
}
private boolean hasPermissionToAssignProtectedTags(final CommonEntityExt<?> entity, final String method) {
if (!isSecured()) {
return true;
}
return TagSecurityUtils.checkPermission(TagSecurityUtils.getAssetName(entity), method);
}
protected void checkOwnerOrAdmin(UserEntity owner) {
if (security instanceof DisabledSecurity) {
return;
}
UserEntity user = getCurrentUser();
Long ownerId = Objects.nonNull(owner) ? owner.getId() : null;
if (!(user.getId().equals(ownerId) || isAdmin())) {
throw new ForbiddenException();
}
}
protected void checkOwnerOrAdminOrModerator(UserEntity owner) {
if (security instanceof DisabledSecurity) {
return;
}
UserEntity user = getCurrentUser();
Long ownerId = Objects.nonNull(owner) ? owner.getId() : null;
if (!(user.getId().equals(ownerId) || isAdmin() || isModerator())) {
throw new ForbiddenException();
}
}
protected void checkOwnerOrAdminOrGranted(CommonEntity<?> entity) {
if (security instanceof DisabledSecurity) {
return;
}
UserEntity user = getCurrentUser();
Long ownerId = Objects.nonNull(entity.getCreatedBy()) ? entity.getCreatedBy().getId() : null;
if (!(user.getId().equals(ownerId) || isAdmin() || permissionService.hasWriteAccess(entity))) {
throw new ForbiddenException();
}
}
protected void checkOwnerOrAdminOrGrantedOrTagManager(CommonEntity<?> entity) {
if (security instanceof DisabledSecurity) {
return;
}
UserEntity user = getCurrentUser();
Long ownerId = Objects.nonNull(entity.getCreatedBy()) ? entity.getCreatedBy().getId() : null;
if (!(user.getId().equals(ownerId) || isAdmin() || permissionService.hasWriteAccess(entity) || TagSecurityUtils.canManageTags())) {
throw new ForbiddenException();
}
}
protected <T extends CommonEntityDTO> List<T> listByTags(List<? extends CommonEntityExt<? extends Number>> entities,
List<String> names,
Class<T> clazz) {
return entities.stream()
.filter(e -> e.getTags().stream()
.map(tag -> tag.getName().toLowerCase(Locale.ROOT))
.collect(Collectors.toList())
.containsAll(names))
.map(entity -> {
T dto = conversionService.convert(entity, clazz);
permissionService.fillWriteAccess(entity, dto);
return dto;
})
.collect(Collectors.toList());
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/IRAnalysisResource.java | src/main/java/org/ohdsi/webapi/service/IRAnalysisResource.java | package org.ohdsi.webapi.service;
import org.ohdsi.webapi.check.CheckResult;
import org.ohdsi.webapi.common.generation.GenerateSqlResult;
import org.ohdsi.webapi.ircalc.AnalysisReport;
import org.ohdsi.webapi.ircalc.dto.IRVersionFullDTO;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.service.dto.AnalysisInfoDTO;
import org.ohdsi.webapi.service.dto.IRAnalysisDTO;
import org.ohdsi.webapi.service.dto.IRAnalysisShortDTO;
import org.ohdsi.webapi.tag.domain.HasTags;
import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO;
import org.ohdsi.webapi.versioning.dto.VersionDTO;
import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;
@Path("/ir/")
/**
 * JAX-RS resource contract for incidence-rate (IR) analyses: CRUD, import/
 * export, per-source generation, reports, tagging and versioning endpoints,
 * all rooted at {@code /ir/}.
 */
@Path("/ir/")
public interface IRAnalysisResource extends HasTags<Integer> {

    /**
     * Returns all IR Analysis in a list.
     *
     * @return List of IncidenceRateAnalysis
     */
    @GET
    @Path("/")
    @Produces(MediaType.APPLICATION_JSON)
    List<IRAnalysisShortDTO> getIRAnalysisList();

    /**
     * Counts IR analyses (other than the one with the given id) that already
     * use the given name.
     *
     * @param id   id to exclude from the check (0 for a new analysis)
     * @param name candidate name
     * @return the number of existing analyses with the same name
     */
    @GET
    @Path("/{id}/exists")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    int getCountIRWithSameName(@PathParam("id") @DefaultValue("0") final int id, @QueryParam("name") String name);

    /**
     * Creates the incidence rate analysis
     *
     * @param analysis The analysis to create.
     * @return The new FeasibilityStudy
     */
    @POST
    @Path("/")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    IRAnalysisDTO createAnalysis(IRAnalysisDTO analysis);

    /**
     * Retrieves the IR analysis with the given id.
     *
     * @param id the IR Analysis ID
     * @return the analysis
     */
    @GET
    @Path("/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    IRAnalysisDTO getAnalysis(@PathParam("id") final int id);

    /**
     * Imports an IR analysis design.
     *
     * @param dto the analysis design to import
     * @return the imported analysis
     */
    @POST
    @Path("/design")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    IRAnalysisDTO doImport(final IRAnalysisDTO dto);

    /**
     * Exports the design of the IR analysis with the given id.
     *
     * @param id the IR Analysis ID
     * @return the analysis design
     */
    @GET
    @Path("/{id}/design")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    IRAnalysisDTO export(@PathParam("id") final Integer id);

    /**
     * Saves (updates) the IR analysis with the given id.
     *
     * @param id       the IR Analysis ID
     * @param analysis the new analysis content
     * @return the saved analysis
     */
    @PUT
    @Path("/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    IRAnalysisDTO saveAnalysis(@PathParam("id") final int id, IRAnalysisDTO analysis);

    /**
     * Starts generation of the IR analysis on the given source.
     *
     * @param analysisId the IR Analysis ID
     * @param sourceKey  key of the source to execute against
     * @return the submitted job
     */
    @GET
    @Path("/{analysis_id}/execute/{sourceKey}")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    JobExecutionResource performAnalysis(@PathParam("analysis_id") final int analysisId, @PathParam("sourceKey") final String sourceKey);

    /**
     * Cancels a running generation of the IR analysis on the given source.
     *
     * @param analysisId the IR Analysis ID
     * @param sourceKey  key of the source the analysis is running against
     */
    @DELETE
    @Path("/{analysis_id}/execute/{sourceKey}")
    void cancelAnalysis(@PathParam("analysis_id") final int analysisId, @PathParam("sourceKey") final String sourceKey);

    /**
     * Returns execution info for the IR analysis across all sources.
     *
     * @param id the IR Analysis ID
     */
    @GET
    @Path("/{id}/info")
    @Produces(MediaType.APPLICATION_JSON)
    List<AnalysisInfoDTO> getAnalysisInfo(@PathParam("id") final int id);

    /**
     * Returns execution info for the IR analysis on a single source.
     *
     * @param id        the IR Analysis ID
     * @param sourceKey key of the source
     */
    @GET
    @Path("/{id}/info/{sourceKey}")
    @Produces(MediaType.APPLICATION_JSON)
    AnalysisInfoDTO getAnalysisInfo(@PathParam("id") final int id, @PathParam("sourceKey") final String sourceKey);

    /**
     * Deletes the generation results of the IR analysis on the given source.
     *
     * @param id        the IR Analysis ID
     * @param sourceKey key of the source whose results are deleted
     */
    @DELETE
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/info/{sourceKey}")
    void deleteInfo(@PathParam("id") final int id, @PathParam("sourceKey") final String sourceKey);

    /**
     * Deletes the specified IR analysis.
     *
     * @param id - the IR Analysis ID to delete
     */
    @DELETE
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}")
    void delete(@PathParam("id") final int id);

    /**
     * Exports the analysis definition and results
     *
     * @param id - the IR Analysis ID to export
     * @return Response containing binary stream of zipped data
     */
    @GET
    @Path("/{id}/export")
    Response export(@PathParam("id") final int id);

    /**
     * Copies the specified IR analysis.
     *
     * @param id - the IR Analysis ID to copy
     * @return the copied analysis as an IRAnalysisDTO
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/copy")
    IRAnalysisDTO copy(@PathParam("id") final int id);

    /**
     * Returns the IR report for one target/outcome pair on the given source.
     *
     * @param id        the IR Analysis ID
     * @param sourceKey key of the source
     * @param targetId  target cohort id
     * @param outcomeId outcome cohort id
     */
    @GET
    @Path("/{id}/report/{sourceKey}")
    @Produces(MediaType.APPLICATION_JSON)
    AnalysisReport getAnalysisReport(@PathParam("id") final int id, @PathParam("sourceKey") final String sourceKey,
                                     @QueryParam("targetId") final int targetId, @QueryParam("outcomeId") final int outcomeId );

    /**
     * Renders the analysis SQL for the given request without executing it.
     */
    @Path("/sql")
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public GenerateSqlResult generateSql(IRAnalysisService.GenerateSqlRequest request);

    /**
     * Runs design diagnostics (checks) against the supplied analysis.
     */
    @POST
    @Path("/check")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public CheckResult runDiagnostics(IRAnalysisDTO irAnalysisDTO);

    /**
     * Assign tag to IR Analysis
     *
     * @param id
     * @param tagId
     */
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/tag/")
    void assignTag(@PathParam("id") final Integer id, final int tagId);

    /**
     * Unassign tag from IR Analysis
     *
     * @param id
     * @param tagId
     */
    @DELETE
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/tag/{tagId}")
    void unassignTag(@PathParam("id") final Integer id, @PathParam("tagId") final int tagId);

    /**
     * Assign protected tag to IR Analysis
     *
     * @param id
     * @param tagId
     */
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/protectedtag/")
    void assignPermissionProtectedTag(@PathParam("id") final int id, final int tagId);

    /**
     * Unassign protected tag from IR Analysis
     *
     * @param id
     * @param tagId
     */
    @DELETE
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/protectedtag/{tagId}")
    void unassignPermissionProtectedTag(@PathParam("id") final int id, @PathParam("tagId") final int tagId);

    /**
     * Get list of versions of IR Analysis
     *
     * @param id
     * @return
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/version/")
    List<VersionDTO> getVersions(@PathParam("id") final long id);

    /**
     * Get version of IR Analysis
     *
     * @param id
     * @param version
     * @return
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/version/{version}")
    IRVersionFullDTO getVersion(@PathParam("id") final int id, @PathParam("version") final int version);

    /**
     * Update version of IR Analysis
     *
     * @param id
     * @param version
     * @param updateDTO
     * @return
     */
    @PUT
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/version/{version}")
    VersionDTO updateVersion(@PathParam("id") final int id, @PathParam("version") final int version,
                             VersionUpdateDTO updateDTO);

    /**
     * Delete version of IR Analysis
     *
     * @param id
     * @param version
     */
    @DELETE
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/version/{version}")
    void deleteVersion(@PathParam("id") final int id, @PathParam("version") final int version) ;

    /**
     * Create a new asset form version of IR Analysis
     *
     * @param id
     * @param version
     * @return
     */
    @PUT
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/version/{version}/createAsset")
    IRAnalysisDTO copyAssetFromVersion(@PathParam("id") final int id, @PathParam("version") final int version);

    /**
     * Get list of incidence rates with assigned tags
     *
     * @param requestDTO
     * @return
     */
    @POST
    @Path("/byTags")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    List<IRAnalysisDTO> listByTags(TagNameListRequestDTO requestDTO);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/CopyAnnotationsRequest.java | src/main/java/org/ohdsi/webapi/service/dto/CopyAnnotationsRequest.java | package org.ohdsi.webapi.service.dto;
/**
 * Request payload for copying annotations from one concept set to another.
 */
public class CopyAnnotationsRequest {

    // Identifier of the concept set the annotations are copied from.
    private int sourceConceptSetId;

    // Identifier of the concept set the annotations are copied to.
    private int targetConceptSetId;

    public int getSourceConceptSetId() {
        return sourceConceptSetId;
    }

    public void setSourceConceptSetId(int sourceConceptSetId) {
        this.sourceConceptSetId = sourceConceptSetId;
    }

    public int getTargetConceptSetId() {
        return targetConceptSetId;
    }

    public void setTargetConceptSetId(int targetConceptSetId) {
        this.targetConceptSetId = targetConceptSetId;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/AnalysisInfoDTO.java | src/main/java/org/ohdsi/webapi/service/dto/AnalysisInfoDTO.java | package org.ohdsi.webapi.service.dto;
import org.ohdsi.webapi.ircalc.AnalysisReport;
import org.ohdsi.webapi.ircalc.ExecutionInfo;
import java.util.ArrayList;
import java.util.List;
/**
 * Pairs an IR analysis execution's status with its report summary rows.
 * Field names are left untouched because DTOs like this are typically
 * serialized by reflection — NOTE(review): confirm before renaming anything.
 */
public class AnalysisInfoDTO {

    // Execution status/metadata for one generation of the analysis.
    private ExecutionInfo executionInfo;

    // Summary rows from the analysis report; never null, empty by default.
    private List<AnalysisReport.Summary> summaryList = new ArrayList<>();

    public ExecutionInfo getExecutionInfo() {
        return executionInfo;
    }

    public void setExecutionInfo(ExecutionInfo executionInfo) {
        this.executionInfo = executionInfo;
    }

    public List<AnalysisReport.Summary> getSummaryList() {
        return summaryList;
    }

    public void setSummaryList(List<AnalysisReport.Summary> summaryList) {
        this.summaryList = summaryList;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/ConceptSetDTO.java | src/main/java/org/ohdsi/webapi/service/dto/ConceptSetDTO.java | package org.ohdsi.webapi.service.dto;
/**
 * Lightweight concept-set DTO: id, name and description, plus whatever shared
 * entity metadata CommonEntityExtDTO carries (not visible here).
 */
public class ConceptSetDTO extends CommonEntityExtDTO {

    // Nullable for not-yet-persisted concept sets — TODO confirm.
    private Integer id;

    private String name;

    private String description;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisDTO.java | src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisDTO.java | package org.ohdsi.webapi.service.dto;
/**
 * Full incidence-rate analysis DTO: the short form plus the serialized
 * analysis design expression.
 */
public class IRAnalysisDTO extends IRAnalysisShortDTO {

    // Serialized analysis design (format defined by the IR expression schema —
    // not visible from this file).
    private String expression;

    public String getExpression() {
        return expression;
    }

    public void setExpression(String expression) {
        this.expression = expression;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/PatientLevelPredictionAnalysisDTO.java | src/main/java/org/ohdsi/webapi/service/dto/PatientLevelPredictionAnalysisDTO.java | package org.ohdsi.webapi.service.dto;
public class PatientLevelPredictionAnalysisDTO extends CommonEntityDTO {
private Integer analysisId;
private String name;
private Integer treatmentId;
private Integer outcomeId;
private String modelType;
private int timeAtRiskStart;
private int timeAtRiskEnd;
private int addExposureDaysToEnd;
private int minimumWashoutPeriod;
private int minimumDaysAtRisk;
private int requireTimeAtRisk;
private int minimumTimeAtRisk;
private int sample;
private int sampleSize;
private int firstExposureOnly;
private int includeAllOutcomes;
private int rmPriorOutcomes;
private int priorOutcomeLookback;
private int testSplit;
private String testFraction;
private String nFold;
private String moAlpha;
private String moClassWeight;
private String moIndexFolder;
private String moK;
private String moLearnRate;
private String moLearningRate;
private String moMaxDepth;
private String moMinImpuritySplit;
private String moMinRows;
private String moMinSamplesLeaf;
private String moMinSamplesSplit;
private String moMTries;
private String moNEstimators;
private String moNThread;
private String moNTrees;
private String moPlot;
private String moSeed;
private String moSize;
private String moVariance;
private String moVarImp;
private int cvDemographics;
private int cvExclusionId;
private int cvInclusionId;
private int cvDemographicsGender;
private int cvDemographicsRace;
private int cvDemographicsEthnicity;
private int cvDemographicsAge;
private int cvDemographicsYear;
private int cvDemographicsMonth;
private int cvConditionOcc;
private int cvConditionOcc365d;
private int cvConditionOcc30d;
private int cvConditionOccInpt180d;
private int cvConditionEra;
private int cvConditionEraEver;
private int cvConditionEraOverlap;
private int cvConditionGroup;
private int cvConditionGroupMeddra;
private int cvConditionGroupSnomed;
private int cvDrugExposure;
private int cvDrugExposure365d;
private int cvDrugExposure30d;
private int cvDrugEra;
private int cvDrugEra365d;
private int cvDrugEra30d;
private int cvDrugEraOverlap;
private int cvDrugEraEver;
private int cvDrugGroup;
private int cvProcedureOcc;
private int cvProcedureOcc365d;
private int cvProcedureOcc30d;
private int cvProcedureGroup;
private int cvObservation;
private int cvObservation365d;
private int cvObservation30d;
private int cvObservationCount365d;
private int cvMeasurement;
private int cvMeasurement365d;
private int cvMeasurement30d;
private int cvMeasurementCount365d;
private int cvMeasurementBelow;
private int cvMeasurementAbove;
private int cvConceptCounts;
private int cvRiskScores;
private int cvRiskScoresCharlson;
private int cvRiskScoresDcsi;
private int cvRiskScoresChads2;
private int cvRiskScoresChads2vasc;
private int cvInteractionYear;
private int cvInteractionMonth;
private int delCovariatesSmallCount;
public Integer getId() {
return analysisId;
}
public void setAnalysisId(Integer analysisId) {
this.analysisId = analysisId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getTreatmentId() {
return treatmentId;
}
public void setTreatmentId(Integer treatmentId) {
this.treatmentId = treatmentId;
}
public Integer getOutcomeId() {
return outcomeId;
}
public void setOutcomeId(Integer outcomeId) {
this.outcomeId = outcomeId;
}
public String getModelType() {
return modelType;
}
public void setModelType(String modelType) {
this.modelType = modelType;
}
public int getTimeAtRiskStart() {
return timeAtRiskStart;
}
public void setTimeAtRiskStart(int timeAtRiskStart) {
this.timeAtRiskStart = timeAtRiskStart;
}
public int getTimeAtRiskEnd() {
return timeAtRiskEnd;
}
public void setTimeAtRiskEnd(int timeAtRiskEnd) {
this.timeAtRiskEnd = timeAtRiskEnd;
}
public int getAddExposureDaysToEnd() {
return addExposureDaysToEnd;
}
public void setAddExposureDaysToEnd(int addExposureDaysToEnd) {
this.addExposureDaysToEnd = addExposureDaysToEnd;
}
public int getMinimumWashoutPeriod() {
return minimumWashoutPeriod;
}
public void setMinimumWashoutPeriod(int minimumWashoutPeriod) {
this.minimumWashoutPeriod = minimumWashoutPeriod;
}
public int getMinimumDaysAtRisk() {
return minimumDaysAtRisk;
}
public void setMinimumDaysAtRisk(int minimumDaysAtRisk) {
this.minimumDaysAtRisk = minimumDaysAtRisk;
}
public int getRequireTimeAtRisk() {
return requireTimeAtRisk;
}
public void setRequireTimeAtRisk(int requireTimeAtRisk) {
this.requireTimeAtRisk = requireTimeAtRisk;
}
public int getMinimumTimeAtRisk() {
return minimumTimeAtRisk;
}
public void setMinimumTimeAtRisk(int minimumTimeAtRisk) {
this.minimumTimeAtRisk = minimumTimeAtRisk;
}
public int getSample() {
return sample;
}
public void setSample(int sample) {
this.sample = sample;
}
public int getSampleSize() {
return sampleSize;
}
public void setSampleSize(int sampleSize) {
this.sampleSize = sampleSize;
}
public int getFirstExposureOnly() {
return firstExposureOnly;
}
public void setFirstExposureOnly(int firstExposureOnly) {
this.firstExposureOnly = firstExposureOnly;
}
public int getIncludeAllOutcomes() {
return includeAllOutcomes;
}
public void setIncludeAllOutcomes(int includeAllOutcomes) {
this.includeAllOutcomes = includeAllOutcomes;
}
public int getRmPriorOutcomes() {
return rmPriorOutcomes;
}
public void setRmPriorOutcomes(int rmPriorOutcomes) {
this.rmPriorOutcomes = rmPriorOutcomes;
}
public int getPriorOutcomeLookback() {
return priorOutcomeLookback;
}
public void setPriorOutcomeLookback(int priorOutcomeLookback) {
this.priorOutcomeLookback = priorOutcomeLookback;
}
public int getTestSplit() {
return testSplit;
}
public void setTestSplit(int testSplit) {
this.testSplit = testSplit;
}
public String getTestFraction() {
return testFraction;
}
public void setTestFraction(String testFraction) {
this.testFraction = testFraction;
}
public String getnFold() {
return nFold;
}
public void setnFold(String nFold) {
this.nFold = nFold;
}
public String getMoAlpha() {
return moAlpha;
}
public void setMoAlpha(String moAlpha) {
this.moAlpha = moAlpha;
}
public String getMoClassWeight() {
return moClassWeight;
}
public void setMoClassWeight(String moClassWeight) {
this.moClassWeight = moClassWeight;
}
public String getMoIndexFolder() {
return moIndexFolder;
}
public void setMoIndexFolder(String moIndexFolder) {
this.moIndexFolder = moIndexFolder;
}
public String getMoK() {
return moK;
}
public void setMoK(String moK) {
this.moK = moK;
}
public String getMoLearnRate() {
return moLearnRate;
}
public void setMoLearnRate(String moLearnRate) {
this.moLearnRate = moLearnRate;
}
public String getMoLearningRate() {
return moLearningRate;
}
public void setMoLearningRate(String moLearningRate) {
this.moLearningRate = moLearningRate;
}
public String getMoMaxDepth() {
return moMaxDepth;
}
public void setMoMaxDepth(String moMaxDepth) {
this.moMaxDepth = moMaxDepth;
}
public String getMoMinImpuritySplit() {
return moMinImpuritySplit;
}
public void setMoMinImpuritySplit(String moMinImpuritySplit) {
this.moMinImpuritySplit = moMinImpuritySplit;
}
public String getMoMinRows() {
return moMinRows;
}
public void setMoMinRows(String moMinRows) {
this.moMinRows = moMinRows;
}
public String getMoMinSamplesLeaf() {
return moMinSamplesLeaf;
}
public void setMoMinSamplesLeaf(String moMinSamplesLeaf) {
this.moMinSamplesLeaf = moMinSamplesLeaf;
}
public String getMoMinSamplesSplit() {
return moMinSamplesSplit;
}
public void setMoMinSamplesSplit(String moMinSamplesSplit) {
this.moMinSamplesSplit = moMinSamplesSplit;
}
public String getMoMTries() {
return moMTries;
}
public void setMoMTries(String moMTries) {
this.moMTries = moMTries;
}
public String getMoNEstimators() {
return moNEstimators;
}
public void setMoNEstimators(String moNEstimators) {
this.moNEstimators = moNEstimators;
}
public String getMoNThread() {
return moNThread;
}
public void setMoNThread(String moNThread) {
this.moNThread = moNThread;
}
public String getMoNTrees() {
return moNTrees;
}
public void setMoNTrees(String moNTrees) {
this.moNTrees = moNTrees;
}
public String getMoPlot() {
return moPlot;
}
public void setMoPlot(String moPlot) {
this.moPlot = moPlot;
}
public String getMoSeed() {
return moSeed;
}
public void setMoSeed(String moSeed) {
this.moSeed = moSeed;
}
public String getMoSize() {
return moSize;
}
public void setMoSize(String moSize) {
this.moSize = moSize;
}
public String getMoVariance() {
return moVariance;
}
public void setMoVariance(String moVariance) {
this.moVariance = moVariance;
}
public String getMoVarImp() {
return moVarImp;
}
public void setMoVarImp(String moVarImp) {
this.moVarImp = moVarImp;
}
public int getCvDemographics() {
return cvDemographics;
}
public void setCvDemographics(int cvDemographics) {
this.cvDemographics = cvDemographics;
}
public int getCvExclusionId() {
return cvExclusionId;
}
public void setCvExclusionId(int cvExclusionId) {
this.cvExclusionId = cvExclusionId;
}
public int getCvInclusionId() {
return cvInclusionId;
}
public void setCvInclusionId(int cvInclusionId) {
this.cvInclusionId = cvInclusionId;
}
public int getCvDemographicsGender() {
return cvDemographicsGender;
}
public void setCvDemographicsGender(int cvDemographicsGender) {
this.cvDemographicsGender = cvDemographicsGender;
}
public int getCvDemographicsRace() {
return cvDemographicsRace;
}
public void setCvDemographicsRace(int cvDemographicsRace) {
this.cvDemographicsRace = cvDemographicsRace;
}
public int getCvDemographicsEthnicity() {
return cvDemographicsEthnicity;
}
// --- Covariate-selection (cv*) accessors --------------------------------------
// Each cv* field is a plain int toggle naming one covariate group available to
// the analysis. NOTE(review): callers appear to treat these as 0/1 flags, but
// nothing in this class enforces that range — confirm against the service layer.
public void setCvDemographicsEthnicity(int cvDemographicsEthnicity) {
this.cvDemographicsEthnicity = cvDemographicsEthnicity;
}
public int getCvDemographicsAge() {
return cvDemographicsAge;
}
public void setCvDemographicsAge(int cvDemographicsAge) {
this.cvDemographicsAge = cvDemographicsAge;
}
public int getCvDemographicsYear() {
return cvDemographicsYear;
}
public void setCvDemographicsYear(int cvDemographicsYear) {
this.cvDemographicsYear = cvDemographicsYear;
}
public int getCvDemographicsMonth() {
return cvDemographicsMonth;
}
public void setCvDemographicsMonth(int cvDemographicsMonth) {
this.cvDemographicsMonth = cvDemographicsMonth;
}
// Condition-occurrence / condition-era covariate toggles.
public int getCvConditionOcc() {
return cvConditionOcc;
}
public void setCvConditionOcc(int cvConditionOcc) {
this.cvConditionOcc = cvConditionOcc;
}
public int getCvConditionOcc365d() {
return cvConditionOcc365d;
}
public void setCvConditionOcc365d(int cvConditionOcc365d) {
this.cvConditionOcc365d = cvConditionOcc365d;
}
public int getCvConditionOcc30d() {
return cvConditionOcc30d;
}
public void setCvConditionOcc30d(int cvConditionOcc30d) {
this.cvConditionOcc30d = cvConditionOcc30d;
}
public int getCvConditionOccInpt180d() {
return cvConditionOccInpt180d;
}
public void setCvConditionOccInpt180d(int cvConditionOccInpt180d) {
this.cvConditionOccInpt180d = cvConditionOccInpt180d;
}
public int getCvConditionEra() {
return cvConditionEra;
}
public void setCvConditionEra(int cvConditionEra) {
this.cvConditionEra = cvConditionEra;
}
public int getCvConditionEraEver() {
return cvConditionEraEver;
}
public void setCvConditionEraEver(int cvConditionEraEver) {
this.cvConditionEraEver = cvConditionEraEver;
}
public int getCvConditionEraOverlap() {
return cvConditionEraOverlap;
}
public void setCvConditionEraOverlap(int cvConditionEraOverlap) {
this.cvConditionEraOverlap = cvConditionEraOverlap;
}
public int getCvConditionGroup() {
return cvConditionGroup;
}
public void setCvConditionGroup(int cvConditionGroup) {
this.cvConditionGroup = cvConditionGroup;
}
public int getCvConditionGroupMeddra() {
return cvConditionGroupMeddra;
}
public void setCvConditionGroupMeddra(int cvConditionGroupMeddra) {
this.cvConditionGroupMeddra = cvConditionGroupMeddra;
}
public int getCvConditionGroupSnomed() {
return cvConditionGroupSnomed;
}
public void setCvConditionGroupSnomed(int cvConditionGroupSnomed) {
this.cvConditionGroupSnomed = cvConditionGroupSnomed;
}
// Drug-exposure / drug-era covariate toggles.
public int getCvDrugExposure() {
return cvDrugExposure;
}
public void setCvDrugExposure(int cvDrugExposure) {
this.cvDrugExposure = cvDrugExposure;
}
public int getCvDrugExposure365d() {
return cvDrugExposure365d;
}
public void setCvDrugExposure365d(int cvDrugExposure365d) {
this.cvDrugExposure365d = cvDrugExposure365d;
}
public int getCvDrugExposure30d() {
return cvDrugExposure30d;
}
public void setCvDrugExposure30d(int cvDrugExposure30d) {
this.cvDrugExposure30d = cvDrugExposure30d;
}
public int getCvDrugEra() {
return cvDrugEra;
}
public void setCvDrugEra(int cvDrugEra) {
this.cvDrugEra = cvDrugEra;
}
public int getCvDrugEra365d() {
return cvDrugEra365d;
}
public void setCvDrugEra365d(int cvDrugEra365d) {
this.cvDrugEra365d = cvDrugEra365d;
}
public int getCvDrugEra30d() {
return cvDrugEra30d;
}
public void setCvDrugEra30d(int cvDrugEra30d) {
this.cvDrugEra30d = cvDrugEra30d;
}
public int getCvDrugEraOverlap() {
return cvDrugEraOverlap;
}
public void setCvDrugEraOverlap(int cvDrugEraOverlap) {
this.cvDrugEraOverlap = cvDrugEraOverlap;
}
public int getCvDrugEraEver() {
return cvDrugEraEver;
}
public void setCvDrugEraEver(int cvDrugEraEver) {
this.cvDrugEraEver = cvDrugEraEver;
}
public int getCvDrugGroup() {
return cvDrugGroup;
}
public void setCvDrugGroup(int cvDrugGroup) {
this.cvDrugGroup = cvDrugGroup;
}
// Procedure-occurrence covariate toggles.
public int getCvProcedureOcc() {
return cvProcedureOcc;
}
public void setCvProcedureOcc(int cvProcedureOcc) {
this.cvProcedureOcc = cvProcedureOcc;
}
public int getCvProcedureOcc365d() {
return cvProcedureOcc365d;
}
public void setCvProcedureOcc365d(int cvProcedureOcc365d) {
this.cvProcedureOcc365d = cvProcedureOcc365d;
}
public int getCvProcedureOcc30d() {
return cvProcedureOcc30d;
}
public void setCvProcedureOcc30d(int cvProcedureOcc30d) {
this.cvProcedureOcc30d = cvProcedureOcc30d;
}
public int getCvProcedureGroup() {
return cvProcedureGroup;
}
public void setCvProcedureGroup(int cvProcedureGroup) {
this.cvProcedureGroup = cvProcedureGroup;
}
// Observation covariate toggles.
public int getCvObservation() {
return cvObservation;
}
public void setCvObservation(int cvObservation) {
this.cvObservation = cvObservation;
}
public int getCvObservation365d() {
return cvObservation365d;
}
public void setCvObservation365d(int cvObservation365d) {
this.cvObservation365d = cvObservation365d;
}
public int getCvObservation30d() {
return cvObservation30d;
}
public void setCvObservation30d(int cvObservation30d) {
this.cvObservation30d = cvObservation30d;
}
public int getCvObservationCount365d() {
return cvObservationCount365d;
}
public void setCvObservationCount365d(int cvObservationCount365d) {
this.cvObservationCount365d = cvObservationCount365d;
}
// Measurement covariate toggles.
public int getCvMeasurement() {
return cvMeasurement;
}
public void setCvMeasurement(int cvMeasurement) {
this.cvMeasurement = cvMeasurement;
}
public int getCvMeasurement365d() {
return cvMeasurement365d;
}
public void setCvMeasurement365d(int cvMeasurement365d) {
this.cvMeasurement365d = cvMeasurement365d;
}
public int getCvMeasurement30d() {
return cvMeasurement30d;
}
public void setCvMeasurement30d(int cvMeasurement30d) {
this.cvMeasurement30d = cvMeasurement30d;
}
public int getCvMeasurementCount365d() {
return cvMeasurementCount365d;
}
public void setCvMeasurementCount365d(int cvMeasurementCount365d) {
this.cvMeasurementCount365d = cvMeasurementCount365d;
}
public int getCvMeasurementBelow() {
return cvMeasurementBelow;
}
public void setCvMeasurementBelow(int cvMeasurementBelow) {
this.cvMeasurementBelow = cvMeasurementBelow;
}
public int getCvMeasurementAbove() {
return cvMeasurementAbove;
}
public void setCvMeasurementAbove(int cvMeasurementAbove) {
this.cvMeasurementAbove = cvMeasurementAbove;
}
public int getCvConceptCounts() {
return cvConceptCounts;
}
public void setCvConceptCounts(int cvConceptCounts) {
this.cvConceptCounts = cvConceptCounts;
}
// Risk-score covariate toggles (Charlson, DCSI, CHADS2, CHADS2-VASc).
public int getCvRiskScores() {
return cvRiskScores;
}
public void setCvRiskScores(int cvRiskScores) {
this.cvRiskScores = cvRiskScores;
}
public int getCvRiskScoresCharlson() {
return cvRiskScoresCharlson;
}
public void setCvRiskScoresCharlson(int cvRiskScoresCharlson) {
this.cvRiskScoresCharlson = cvRiskScoresCharlson;
}
public int getCvRiskScoresDcsi() {
return cvRiskScoresDcsi;
}
public void setCvRiskScoresDcsi(int cvRiskScoresDcsi) {
this.cvRiskScoresDcsi = cvRiskScoresDcsi;
}
public int getCvRiskScoresChads2() {
return cvRiskScoresChads2;
}
public void setCvRiskScoresChads2(int cvRiskScoresChads2) {
this.cvRiskScoresChads2 = cvRiskScoresChads2;
}
public int getCvRiskScoresChads2vasc() {
return cvRiskScoresChads2vasc;
}
public void setCvRiskScoresChads2vasc(int cvRiskScoresChads2vasc) {
this.cvRiskScoresChads2vasc = cvRiskScoresChads2vasc;
}
// Interaction-term covariate toggles.
public int getCvInteractionYear() {
return cvInteractionYear;
}
public void setCvInteractionYear(int cvInteractionYear) {
this.cvInteractionYear = cvInteractionYear;
}
public int getCvInteractionMonth() {
return cvInteractionMonth;
}
public void setCvInteractionMonth(int cvInteractionMonth) {
this.cvInteractionMonth = cvInteractionMonth;
}
// Threshold below which covariates are dropped (see field name); exact
// semantics are defined by the consumer of this bean — not visible here.
public int getDelCovariatesSmallCount() {
return delCovariatesSmallCount;
}
public void setDelCovariatesSmallCount(int delCovariatesSmallCount) {
this.delCovariatesSmallCount = delCovariatesSmallCount;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/ComparativeCohortAnalysisDTO.java | src/main/java/org/ohdsi/webapi/service/dto/ComparativeCohortAnalysisDTO.java | package org.ohdsi.webapi.service.dto;
/**
 * Transfer object describing a comparative cohort analysis: the treatment,
 * comparator and outcome cohorts, time-at-risk settings, and two parallel
 * banks of covariate toggles — ps* for the propensity-score model and om*
 * for the outcome model. This is a plain JavaBean; property names on the
 * wire derive from the accessor names, so do not rename accessors.
 */
public class ComparativeCohortAnalysisDTO extends CommonEntityDTO {
// Identity and cohort selection.
private Integer analysisId;
private String name;
private Integer treatmentId;
private Integer comparatorId;
private Integer outcomeId;
private String modelType;
// Time-at-risk window and subject-eligibility settings.
private int timeAtRiskStart;
private int timeAtRiskEnd;
private int addExposureDaysToEnd;
private int minimumWashoutPeriod;
private int minimumDaysAtRisk;
private int rmSubjectsInBothCohorts;
private int rmPriorOutcomes;
// Propensity-score (ps*) model settings and covariate toggles.
// NOTE(review): the int toggles look like 0/1 flags but are not range-checked here.
private int psAdjustment;
private int psExclusionId;
private int psInclusionId;
private int psDemographics;
private int psDemographicsGender;
private int psDemographicsRace;
private int psDemographicsEthnicity;
private int psDemographicsAge;
private int psDemographicsYear;
private int psDemographicsMonth;
private int psTrim;
private int psTrimFraction;
private int psMatch;
private int psMatchMaxRatio;
private int psStrat;
private int psStratNumStrata;
private int psConditionOcc;
private int psConditionOcc365d;
private int psConditionOcc30d;
private int psConditionOccInpt180d;
private int psConditionEra;
private int psConditionEraEver;
private int psConditionEraOverlap;
private int psConditionGroup;
private int psConditionGroupMeddra;
private int psConditionGroupSnomed;
private int psDrugExposure;
private int psDrugExposure365d;
private int psDrugExposure30d;
private int psDrugEra;
private int psDrugEra365d;
private int psDrugEra30d;
private int psDrugEraOverlap;
private int psDrugEraEver;
private int psDrugGroup;
private int psProcedureOcc;
private int psProcedureOcc365d;
private int psProcedureOcc30d;
private int psProcedureGroup;
private int psObservation;
private int psObservation365d;
private int psObservation30d;
private int psObservationCount365d;
private int psMeasurement;
private int psMeasurement365d;
private int psMeasurement30d;
private int psMeasurementCount365d;
private int psMeasurementBelow;
private int psMeasurementAbove;
private int psConceptCounts;
private int psRiskScores;
private int psRiskScoresCharlson;
private int psRiskScoresDcsi;
private int psRiskScoresChads2;
private int psRiskScoresChads2vasc;
private int psInteractionYear;
private int psInteractionMonth;
// Outcome-model (om*) settings and covariate toggles — mirrors the ps* bank.
private int omCovariates;
private int omExclusionId;
private int omInclusionId;
private int omDemographics;
private int omDemographicsGender;
private int omDemographicsRace;
private int omDemographicsEthnicity;
private int omDemographicsAge;
private int omDemographicsYear;
private int omDemographicsMonth;
private int omTrim;
private int omTrimFraction;
private int omMatch;
private int omMatchMaxRatio;
private int omStrat;
private int omStratNumStrata;
private int omConditionOcc;
private int omConditionOcc365d;
private int omConditionOcc30d;
private int omConditionOccInpt180d;
private int omConditionEra;
private int omConditionEraEver;
private int omConditionEraOverlap;
private int omConditionGroup;
private int omConditionGroupMeddra;
private int omConditionGroupSnomed;
private int omDrugExposure;
private int omDrugExposure365d;
private int omDrugExposure30d;
private int omDrugEra;
private int omDrugEra365d;
private int omDrugEra30d;
private int omDrugEraOverlap;
private int omDrugEraEver;
private int omDrugGroup;
private int omProcedureOcc;
private int omProcedureOcc365d;
private int omProcedureOcc30d;
private int omProcedureGroup;
private int omObservation;
private int omObservation365d;
private int omObservation30d;
private int omObservationCount365d;
private int omMeasurement;
private int omMeasurement365d;
private int omMeasurement30d;
private int omMeasurementCount365d;
private int omMeasurementBelow;
private int omMeasurementAbove;
private int omConceptCounts;
private int omRiskScores;
private int omRiskScoresCharlson;
private int omRiskScoresDcsi;
private int omRiskScoresChads2;
private int omRiskScoresChads2vasc;
private int omInteractionYear;
private int omInteractionMonth;
// Post-processing settings.
private int delCovariatesSmallCount;
private int negativeControlId;
// NOTE(review): asymmetric bean property — the getter is getId() (exposing
// analysisId as "id") while the setter is setAnalysisId(). Presumably getId()
// satisfies a contract from CommonEntityDTO; confirm before "fixing".
public Integer getId() {
return analysisId;
}
public void setAnalysisId(Integer analysisId) {
this.analysisId = analysisId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getTreatmentId() {
return treatmentId;
}
public void setTreatmentId(Integer treatmentId) {
this.treatmentId = treatmentId;
}
public Integer getComparatorId() {
return comparatorId;
}
public void setComparatorId(Integer comparatorId) {
this.comparatorId = comparatorId;
}
public Integer getOutcomeId() {
return outcomeId;
}
public void setOutcomeId(Integer outcomeId) {
this.outcomeId = outcomeId;
}
public String getModelType() {
return modelType;
}
public void setModelType(String modelType) {
this.modelType = modelType;
}
public int getTimeAtRiskStart() {
return timeAtRiskStart;
}
public void setTimeAtRiskStart(int timeAtRiskStart) {
this.timeAtRiskStart = timeAtRiskStart;
}
public int getTimeAtRiskEnd() {
return timeAtRiskEnd;
}
public void setTimeAtRiskEnd(int timeAtRiskEnd) {
this.timeAtRiskEnd = timeAtRiskEnd;
}
public int getAddExposureDaysToEnd() {
return addExposureDaysToEnd;
}
public void setAddExposureDaysToEnd(int addExposureDaysToEnd) {
this.addExposureDaysToEnd = addExposureDaysToEnd;
}
public int getMinimumWashoutPeriod() {
return minimumWashoutPeriod;
}
public void setMinimumWashoutPeriod(int minimumWashoutPeriod) {
this.minimumWashoutPeriod = minimumWashoutPeriod;
}
public int getMinimumDaysAtRisk() {
return minimumDaysAtRisk;
}
public void setMinimumDaysAtRisk(int minimumDaysAtRisk) {
this.minimumDaysAtRisk = minimumDaysAtRisk;
}
public int getRmSubjectsInBothCohorts() {
return rmSubjectsInBothCohorts;
}
public void setRmSubjectsInBothCohorts(int rmSubjectsInBothCohorts) {
this.rmSubjectsInBothCohorts = rmSubjectsInBothCohorts;
}
public int getRmPriorOutcomes() {
return rmPriorOutcomes;
}
public void setRmPriorOutcomes(int rmPriorOutcomes) {
this.rmPriorOutcomes = rmPriorOutcomes;
}
// --- ps* accessors ---
public int getPsAdjustment() {
return psAdjustment;
}
public void setPsAdjustment(int psAdjustment) {
this.psAdjustment = psAdjustment;
}
public int getPsExclusionId() {
return psExclusionId;
}
public void setPsExclusionId(int psExclusionId) {
this.psExclusionId = psExclusionId;
}
public int getPsInclusionId() {
return psInclusionId;
}
public void setPsInclusionId(int psInclusionId) {
this.psInclusionId = psInclusionId;
}
public int getPsDemographics() {
return psDemographics;
}
public void setPsDemographics(int psDemographics) {
this.psDemographics = psDemographics;
}
public int getPsDemographicsGender() {
return psDemographicsGender;
}
public void setPsDemographicsGender(int psDemographicsGender) {
this.psDemographicsGender = psDemographicsGender;
}
public int getPsDemographicsRace() {
return psDemographicsRace;
}
public void setPsDemographicsRace(int psDemographicsRace) {
this.psDemographicsRace = psDemographicsRace;
}
public int getPsDemographicsEthnicity() {
return psDemographicsEthnicity;
}
public void setPsDemographicsEthnicity(int psDemographicsEthnicity) {
this.psDemographicsEthnicity = psDemographicsEthnicity;
}
public int getPsDemographicsAge() {
return psDemographicsAge;
}
public void setPsDemographicsAge(int psDemographicsAge) {
this.psDemographicsAge = psDemographicsAge;
}
public int getPsDemographicsYear() {
return psDemographicsYear;
}
public void setPsDemographicsYear(int psDemographicsYear) {
this.psDemographicsYear = psDemographicsYear;
}
public int getPsDemographicsMonth() {
return psDemographicsMonth;
}
public void setPsDemographicsMonth(int psDemographicsMonth) {
this.psDemographicsMonth = psDemographicsMonth;
}
public int getPsTrim() {
return psTrim;
}
public void setPsTrim(int psTrim) {
this.psTrim = psTrim;
}
public int getPsTrimFraction() {
return psTrimFraction;
}
public void setPsTrimFraction(int psTrimFraction) {
this.psTrimFraction = psTrimFraction;
}
public int getPsMatch() {
return psMatch;
}
public void setPsMatch(int psMatch) {
this.psMatch = psMatch;
}
public int getPsMatchMaxRatio() {
return psMatchMaxRatio;
}
public void setPsMatchMaxRatio(int psMatchMaxRatio) {
this.psMatchMaxRatio = psMatchMaxRatio;
}
public int getPsStrat() {
return psStrat;
}
public void setPsStrat(int psStrat) {
this.psStrat = psStrat;
}
public int getPsStratNumStrata() {
return psStratNumStrata;
}
public void setPsStratNumStrata(int psStratNumStrata) {
this.psStratNumStrata = psStratNumStrata;
}
public int getPsConditionOcc() {
return psConditionOcc;
}
public void setPsConditionOcc(int psConditionOcc) {
this.psConditionOcc = psConditionOcc;
}
public int getPsConditionOcc365d() {
return psConditionOcc365d;
}
public void setPsConditionOcc365d(int psConditionOcc365d) {
this.psConditionOcc365d = psConditionOcc365d;
}
public int getPsConditionOcc30d() {
return psConditionOcc30d;
}
public void setPsConditionOcc30d(int psConditionOcc30d) {
this.psConditionOcc30d = psConditionOcc30d;
}
public int getPsConditionOccInpt180d() {
return psConditionOccInpt180d;
}
public void setPsConditionOccInpt180d(int psConditionOccInpt180d) {
this.psConditionOccInpt180d = psConditionOccInpt180d;
}
public int getPsConditionEra() {
return psConditionEra;
}
public void setPsConditionEra(int psConditionEra) {
this.psConditionEra = psConditionEra;
}
public int getPsConditionEraEver() {
return psConditionEraEver;
}
public void setPsConditionEraEver(int psConditionEraEver) {
this.psConditionEraEver = psConditionEraEver;
}
public int getPsConditionEraOverlap() {
return psConditionEraOverlap;
}
public void setPsConditionEraOverlap(int psConditionEraOverlap) {
this.psConditionEraOverlap = psConditionEraOverlap;
}
public int getPsConditionGroup() {
return psConditionGroup;
}
public void setPsConditionGroup(int psConditionGroup) {
this.psConditionGroup = psConditionGroup;
}
public int getPsConditionGroupMeddra() {
return psConditionGroupMeddra;
}
public void setPsConditionGroupMeddra(int psConditionGroupMeddra) {
this.psConditionGroupMeddra = psConditionGroupMeddra;
}
public int getPsConditionGroupSnomed() {
return psConditionGroupSnomed;
}
public void setPsConditionGroupSnomed(int psConditionGroupSnomed) {
this.psConditionGroupSnomed = psConditionGroupSnomed;
}
public int getPsDrugExposure() {
return psDrugExposure;
}
public void setPsDrugExposure(int psDrugExposure) {
this.psDrugExposure = psDrugExposure;
}
public int getPsDrugExposure365d() {
return psDrugExposure365d;
}
public void setPsDrugExposure365d(int psDrugExposure365d) {
this.psDrugExposure365d = psDrugExposure365d;
}
public int getPsDrugExposure30d() {
return psDrugExposure30d;
}
public void setPsDrugExposure30d(int psDrugExposure30d) {
this.psDrugExposure30d = psDrugExposure30d;
}
public int getPsDrugEra() {
return psDrugEra;
}
public void setPsDrugEra(int psDrugEra) {
this.psDrugEra = psDrugEra;
}
public int getPsDrugEra365d() {
return psDrugEra365d;
}
public void setPsDrugEra365d(int psDrugEra365d) {
this.psDrugEra365d = psDrugEra365d;
}
public int getPsDrugEra30d() {
return psDrugEra30d;
}
public void setPsDrugEra30d(int psDrugEra30d) {
this.psDrugEra30d = psDrugEra30d;
}
public int getPsDrugEraOverlap() {
return psDrugEraOverlap;
}
public void setPsDrugEraOverlap(int psDrugEraOverlap) {
this.psDrugEraOverlap = psDrugEraOverlap;
}
public int getPsDrugEraEver() {
return psDrugEraEver;
}
public void setPsDrugEraEver(int psDrugEraEver) {
this.psDrugEraEver = psDrugEraEver;
}
public int getPsDrugGroup() {
return psDrugGroup;
}
public void setPsDrugGroup(int psDrugGroup) {
this.psDrugGroup = psDrugGroup;
}
public int getPsProcedureOcc() {
return psProcedureOcc;
}
public void setPsProcedureOcc(int psProcedureOcc) {
this.psProcedureOcc = psProcedureOcc;
}
public int getPsProcedureOcc365d() {
return psProcedureOcc365d;
}
public void setPsProcedureOcc365d(int psProcedureOcc365d) {
this.psProcedureOcc365d = psProcedureOcc365d;
}
public int getPsProcedureOcc30d() {
return psProcedureOcc30d;
}
public void setPsProcedureOcc30d(int psProcedureOcc30d) {
this.psProcedureOcc30d = psProcedureOcc30d;
}
public int getPsProcedureGroup() {
return psProcedureGroup;
}
public void setPsProcedureGroup(int psProcedureGroup) {
this.psProcedureGroup = psProcedureGroup;
}
public int getPsObservation() {
return psObservation;
}
public void setPsObservation(int psObservation) {
this.psObservation = psObservation;
}
public int getPsObservation365d() {
return psObservation365d;
}
public void setPsObservation365d(int psObservation365d) {
this.psObservation365d = psObservation365d;
}
public int getPsObservation30d() {
return psObservation30d;
}
public void setPsObservation30d(int psObservation30d) {
this.psObservation30d = psObservation30d;
}
public int getPsObservationCount365d() {
return psObservationCount365d;
}
public void setPsObservationCount365d(int psObservationCount365d) {
this.psObservationCount365d = psObservationCount365d;
}
public int getPsMeasurement() {
return psMeasurement;
}
public void setPsMeasurement(int psMeasurement) {
this.psMeasurement = psMeasurement;
}
public int getPsMeasurement365d() {
return psMeasurement365d;
}
public void setPsMeasurement365d(int psMeasurement365d) {
this.psMeasurement365d = psMeasurement365d;
}
public int getPsMeasurement30d() {
return psMeasurement30d;
}
public void setPsMeasurement30d(int psMeasurement30d) {
this.psMeasurement30d = psMeasurement30d;
}
public int getPsMeasurementCount365d() {
return psMeasurementCount365d;
}
public void setPsMeasurementCount365d(int psMeasurementCount365d) {
this.psMeasurementCount365d = psMeasurementCount365d;
}
public int getPsMeasurementBelow() {
return psMeasurementBelow;
}
public void setPsMeasurementBelow(int psMeasurementBelow) {
this.psMeasurementBelow = psMeasurementBelow;
}
public int getPsMeasurementAbove() {
return psMeasurementAbove;
}
public void setPsMeasurementAbove(int psMeasurementAbove) {
this.psMeasurementAbove = psMeasurementAbove;
}
public int getPsConceptCounts() {
return psConceptCounts;
}
public void setPsConceptCounts(int psConceptCounts) {
this.psConceptCounts = psConceptCounts;
}
public int getPsRiskScores() {
return psRiskScores;
}
public void setPsRiskScores(int psRiskScores) {
this.psRiskScores = psRiskScores;
}
public int getPsRiskScoresCharlson() {
return psRiskScoresCharlson;
}
public void setPsRiskScoresCharlson(int psRiskScoresCharlson) {
this.psRiskScoresCharlson = psRiskScoresCharlson;
}
public int getPsRiskScoresDcsi() {
return psRiskScoresDcsi;
}
public void setPsRiskScoresDcsi(int psRiskScoresDcsi) {
this.psRiskScoresDcsi = psRiskScoresDcsi;
}
public int getPsRiskScoresChads2() {
return psRiskScoresChads2;
}
public void setPsRiskScoresChads2(int psRiskScoresChads2) {
this.psRiskScoresChads2 = psRiskScoresChads2;
}
public int getPsRiskScoresChads2vasc() {
return psRiskScoresChads2vasc;
}
public void setPsRiskScoresChads2vasc(int psRiskScoresChads2vasc) {
this.psRiskScoresChads2vasc = psRiskScoresChads2vasc;
}
public int getPsInteractionYear() {
return psInteractionYear;
}
public void setPsInteractionYear(int psInteractionYear) {
this.psInteractionYear = psInteractionYear;
}
public int getPsInteractionMonth() {
return psInteractionMonth;
}
public void setPsInteractionMonth(int psInteractionMonth) {
this.psInteractionMonth = psInteractionMonth;
}
// --- om* accessors ---
public int getOmCovariates() {
return omCovariates;
}
public void setOmCovariates(int omCovariates) {
this.omCovariates = omCovariates;
}
public int getOmExclusionId() {
return omExclusionId;
}
public void setOmExclusionId(int omExclusionId) {
this.omExclusionId = omExclusionId;
}
public int getOmInclusionId() {
return omInclusionId;
}
public void setOmInclusionId(int omInclusionId) {
this.omInclusionId = omInclusionId;
}
public int getOmDemographics() {
return omDemographics;
}
public void setOmDemographics(int omDemographics) {
this.omDemographics = omDemographics;
}
public int getOmDemographicsGender() {
return omDemographicsGender;
}
public void setOmDemographicsGender(int omDemographicsGender) {
this.omDemographicsGender = omDemographicsGender;
}
public int getOmDemographicsRace() {
return omDemographicsRace;
}
public void setOmDemographicsRace(int omDemographicsRace) {
this.omDemographicsRace = omDemographicsRace;
}
public int getOmDemographicsEthnicity() {
return omDemographicsEthnicity;
}
public void setOmDemographicsEthnicity(int omDemographicsEthnicity) {
this.omDemographicsEthnicity = omDemographicsEthnicity;
}
public int getOmDemographicsAge() {
return omDemographicsAge;
}
public void setOmDemographicsAge(int omDemographicsAge) {
this.omDemographicsAge = omDemographicsAge;
}
public int getOmDemographicsYear() {
return omDemographicsYear;
}
public void setOmDemographicsYear(int omDemographicsYear) {
this.omDemographicsYear = omDemographicsYear;
}
public int getOmDemographicsMonth() {
return omDemographicsMonth;
}
public void setOmDemographicsMonth(int omDemographicsMonth) {
this.omDemographicsMonth = omDemographicsMonth;
}
public int getOmTrim() {
return omTrim;
}
public void setOmTrim(int omTrim) {
this.omTrim = omTrim;
}
public int getOmTrimFraction() {
return omTrimFraction;
}
public void setOmTrimFraction(int omTrimFraction) {
this.omTrimFraction = omTrimFraction;
}
public int getOmMatch() {
return omMatch;
}
public void setOmMatch(int omMatch) {
this.omMatch = omMatch;
}
public int getOmMatchMaxRatio() {
return omMatchMaxRatio;
}
public void setOmMatchMaxRatio(int omMatchMaxRatio) {
this.omMatchMaxRatio = omMatchMaxRatio;
}
public int getOmStrat() {
return omStrat;
}
public void setOmStrat(int omStrat) {
this.omStrat = omStrat;
}
public int getOmStratNumStrata() {
return omStratNumStrata;
}
public void setOmStratNumStrata(int omStratNumStrata) {
this.omStratNumStrata = omStratNumStrata;
}
public int getOmConditionOcc() {
return omConditionOcc;
}
public void setOmConditionOcc(int omConditionOcc) {
this.omConditionOcc = omConditionOcc;
}
public int getOmConditionOcc365d() {
return omConditionOcc365d;
}
public void setOmConditionOcc365d(int omConditionOcc365d) {
this.omConditionOcc365d = omConditionOcc365d;
}
public int getOmConditionOcc30d() {
return omConditionOcc30d;
}
public void setOmConditionOcc30d(int omConditionOcc30d) {
this.omConditionOcc30d = omConditionOcc30d;
}
public int getOmConditionOccInpt180d() {
return omConditionOccInpt180d;
}
public void setOmConditionOccInpt180d(int omConditionOccInpt180d) {
this.omConditionOccInpt180d = omConditionOccInpt180d;
}
public int getOmConditionEra() {
return omConditionEra;
}
public void setOmConditionEra(int omConditionEra) {
this.omConditionEra = omConditionEra;
}
public int getOmConditionEraEver() {
return omConditionEraEver;
}
public void setOmConditionEraEver(int omConditionEraEver) {
this.omConditionEraEver = omConditionEraEver;
}
public int getOmConditionEraOverlap() {
return omConditionEraOverlap;
}
public void setOmConditionEraOverlap(int omConditionEraOverlap) {
this.omConditionEraOverlap = omConditionEraOverlap;
}
public int getOmConditionGroup() {
return omConditionGroup;
}
public void setOmConditionGroup(int omConditionGroup) {
this.omConditionGroup = omConditionGroup;
}
public int getOmConditionGroupMeddra() {
return omConditionGroupMeddra;
}
public void setOmConditionGroupMeddra(int omConditionGroupMeddra) {
this.omConditionGroupMeddra = omConditionGroupMeddra;
}
public int getOmConditionGroupSnomed() {
return omConditionGroupSnomed;
}
public void setOmConditionGroupSnomed(int omConditionGroupSnomed) {
this.omConditionGroupSnomed = omConditionGroupSnomed;
}
public int getOmDrugExposure() {
return omDrugExposure;
}
public void setOmDrugExposure(int omDrugExposure) {
this.omDrugExposure = omDrugExposure;
}
public int getOmDrugExposure365d() {
return omDrugExposure365d;
}
public void setOmDrugExposure365d(int omDrugExposure365d) {
this.omDrugExposure365d = omDrugExposure365d;
}
public int getOmDrugExposure30d() {
return omDrugExposure30d;
}
public void setOmDrugExposure30d(int omDrugExposure30d) {
this.omDrugExposure30d = omDrugExposure30d;
}
public int getOmDrugEra() {
return omDrugEra;
}
public void setOmDrugEra(int omDrugEra) {
this.omDrugEra = omDrugEra;
}
public int getOmDrugEra365d() {
return omDrugEra365d;
}
public void setOmDrugEra365d(int omDrugEra365d) {
this.omDrugEra365d = omDrugEra365d;
}
public int getOmDrugEra30d() {
return omDrugEra30d;
}
public void setOmDrugEra30d(int omDrugEra30d) {
this.omDrugEra30d = omDrugEra30d;
}
public int getOmDrugEraOverlap() {
return omDrugEraOverlap;
}
public void setOmDrugEraOverlap(int omDrugEraOverlap) {
this.omDrugEraOverlap = omDrugEraOverlap;
}
public int getOmDrugEraEver() {
return omDrugEraEver;
}
public void setOmDrugEraEver(int omDrugEraEver) {
this.omDrugEraEver = omDrugEraEver;
}
public int getOmDrugGroup() {
return omDrugGroup;
}
public void setOmDrugGroup(int omDrugGroup) {
this.omDrugGroup = omDrugGroup;
}
public int getOmProcedureOcc() {
return omProcedureOcc;
}
public void setOmProcedureOcc(int omProcedureOcc) {
this.omProcedureOcc = omProcedureOcc;
}
public int getOmProcedureOcc365d() {
return omProcedureOcc365d;
}
public void setOmProcedureOcc365d(int omProcedureOcc365d) {
this.omProcedureOcc365d = omProcedureOcc365d;
}
public int getOmProcedureOcc30d() {
return omProcedureOcc30d;
}
public void setOmProcedureOcc30d(int omProcedureOcc30d) {
this.omProcedureOcc30d = omProcedureOcc30d;
}
public int getOmProcedureGroup() {
return omProcedureGroup;
}
public void setOmProcedureGroup(int omProcedureGroup) {
this.omProcedureGroup = omProcedureGroup;
}
public int getOmObservation() {
return omObservation;
}
public void setOmObservation(int omObservation) {
this.omObservation = omObservation;
}
public int getOmObservation365d() {
return omObservation365d;
}
public void setOmObservation365d(int omObservation365d) {
this.omObservation365d = omObservation365d;
}
public int getOmObservation30d() {
return omObservation30d;
}
public void setOmObservation30d(int omObservation30d) {
this.omObservation30d = omObservation30d;
}
public int getOmObservationCount365d() {
return omObservationCount365d;
}
public void setOmObservationCount365d(int omObservationCount365d) {
this.omObservationCount365d = omObservationCount365d;
}
public int getOmMeasurement() {
return omMeasurement;
}
public void setOmMeasurement(int omMeasurement) {
this.omMeasurement = omMeasurement;
}
public int getOmMeasurement365d() {
return omMeasurement365d;
}
public void setOmMeasurement365d(int omMeasurement365d) {
this.omMeasurement365d = omMeasurement365d;
}
public int getOmMeasurement30d() {
return omMeasurement30d;
}
public void setOmMeasurement30d(int omMeasurement30d) {
this.omMeasurement30d = omMeasurement30d;
}
public int getOmMeasurementCount365d() {
return omMeasurementCount365d;
}
public void setOmMeasurementCount365d(int omMeasurementCount365d) {
this.omMeasurementCount365d = omMeasurementCount365d;
}
public int getOmMeasurementBelow() {
return omMeasurementBelow;
}
public void setOmMeasurementBelow(int omMeasurementBelow) {
this.omMeasurementBelow = omMeasurementBelow;
}
public int getOmMeasurementAbove() {
return omMeasurementAbove;
}
public void setOmMeasurementAbove(int omMeasurementAbove) {
this.omMeasurementAbove = omMeasurementAbove;
}
public int getOmConceptCounts() {
return omConceptCounts;
}
public void setOmConceptCounts(int omConceptCounts) {
this.omConceptCounts = omConceptCounts;
}
public int getOmRiskScores() {
return omRiskScores;
}
public void setOmRiskScores(int omRiskScores) {
this.omRiskScores = omRiskScores;
}
public int getOmRiskScoresCharlson() {
return omRiskScoresCharlson;
}
public void setOmRiskScoresCharlson(int omRiskScoresCharlson) {
this.omRiskScoresCharlson = omRiskScoresCharlson;
}
public int getOmRiskScoresDcsi() {
return omRiskScoresDcsi;
}
public void setOmRiskScoresDcsi(int omRiskScoresDcsi) {
this.omRiskScoresDcsi = omRiskScoresDcsi;
}
public int getOmRiskScoresChads2() {
return omRiskScoresChads2;
}
public void setOmRiskScoresChads2(int omRiskScoresChads2) {
this.omRiskScoresChads2 = omRiskScoresChads2;
}
public int getOmRiskScoresChads2vasc() {
return omRiskScoresChads2vasc;
}
public void setOmRiskScoresChads2vasc(int omRiskScoresChads2vasc) {
this.omRiskScoresChads2vasc = omRiskScoresChads2vasc;
}
public int getOmInteractionYear() {
return omInteractionYear;
}
public void setOmInteractionYear(int omInteractionYear) {
this.omInteractionYear = omInteractionYear;
}
public int getOmInteractionMonth() {
return omInteractionMonth;
}
public void setOmInteractionMonth(int omInteractionMonth) {
this.omInteractionMonth = omInteractionMonth;
}
public int getDelCovariatesSmallCount() {
return delCovariatesSmallCount;
}
public void setDelCovariatesSmallCount(int delCovariatesSmallCount) {
this.delCovariatesSmallCount = delCovariatesSmallCount;
}
public int getNegativeControlId() {
return negativeControlId;
}
public void setNegativeControlId(int negativeControlId) {
this.negativeControlId = negativeControlId;
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/SaveConceptSetAnnotationsRequest.java | src/main/java/org/ohdsi/webapi/service/dto/SaveConceptSetAnnotationsRequest.java | package org.ohdsi.webapi.service.dto;
import java.util.List;
/**
 * Request payload for saving concept-set annotation changes in one call:
 * a batch of annotations to add and a batch to remove.
 */
public class SaveConceptSetAnnotationsRequest {

    /** Annotations the client wants added. May be {@code null} if none were sent. */
    private List<AnnotationDTO> newAnnotation;

    /** Annotations the client wants removed. May be {@code null} if none were sent. */
    private List<AnnotationDTO> removeAnnotation;

    public List<AnnotationDTO> getNewAnnotation() {
        return this.newAnnotation;
    }

    public void setNewAnnotation(List<AnnotationDTO> newAnnotation) {
        this.newAnnotation = newAnnotation;
    }

    public List<AnnotationDTO> getRemoveAnnotation() {
        return this.removeAnnotation;
    }

    public void setRemoveAnnotation(List<AnnotationDTO> removeAnnotation) {
        this.removeAnnotation = removeAnnotation;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/CommonEntityExtDTO.java | src/main/java/org/ohdsi/webapi/service/dto/CommonEntityExtDTO.java | package org.ohdsi.webapi.service.dto;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.ohdsi.analysis.CohortMetadata;
import org.ohdsi.webapi.CommonDTO;
import org.ohdsi.webapi.cohortdefinition.CohortMetadataExt;
import org.ohdsi.webapi.tag.dto.TagDTO;
import org.ohdsi.webapi.user.dto.UserDTO;
import java.util.Date;
import java.util.List;
import java.util.Set;
/**
 * Common DTO base extended with the set of tags assigned to the entity.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public abstract class CommonEntityExtDTO extends CommonEntityDTO {

    @JsonProperty
    private Set<TagDTO> tags;

    public Set<TagDTO> getTags() {
        return this.tags;
    }

    public void setTags(Set<TagDTO> tags) {
        this.tags = tags;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/AnnotationDTO.java | src/main/java/org/ohdsi/webapi/service/dto/AnnotationDTO.java | package org.ohdsi.webapi.service.dto;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
 * Annotation attached to a concept set: author, creation date, the vocabulary
 * and concept set versions it refers to, and the raw search data it was
 * derived from.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class AnnotationDTO {

    private Integer id;
    private String createdBy;
    private String createdDate;
    private String vocabularyVersion;
    private Integer conceptSetVersion;
    // Raw search criteria (serialized form; exact format not visible here).
    private String searchData;
    private String copiedFromConceptSetIds;
    private Integer conceptId;

    public Integer getId() {
        return this.id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getCreatedBy() {
        return this.createdBy;
    }

    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }

    public String getCreatedDate() {
        return this.createdDate;
    }

    public void setCreatedDate(String createdDate) {
        this.createdDate = createdDate;
    }

    public String getVocabularyVersion() {
        return this.vocabularyVersion;
    }

    public void setVocabularyVersion(String vocabularyVersion) {
        this.vocabularyVersion = vocabularyVersion;
    }

    public Integer getConceptSetVersion() {
        return this.conceptSetVersion;
    }

    public void setConceptSetVersion(Integer conceptSetVersion) {
        this.conceptSetVersion = conceptSetVersion;
    }

    public String getSearchData() {
        return this.searchData;
    }

    public void setSearchData(String searchData) {
        this.searchData = searchData;
    }

    public Integer getConceptId() {
        return this.conceptId;
    }

    public void setConceptId(Integer conceptId) {
        this.conceptId = conceptId;
    }

    public String getCopiedFromConceptSetIds() {
        return this.copiedFromConceptSetIds;
    }

    public void setCopiedFromConceptSetIds(String copiedFromConceptSetIds) {
        this.copiedFromConceptSetIds = copiedFromConceptSetIds;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisShortDTO.java | src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisShortDTO.java | package org.ohdsi.webapi.service.dto;
import org.ohdsi.webapi.model.CommonEntityExt;
/**
 * Lightweight incidence-rate analysis DTO: identifier, name and description
 * only (no expression payload).
 */
public class IRAnalysisShortDTO extends CommonEntityExtDTO {

    private Integer id;
    private String name;
    private String description;

    public Integer getId() {
        return this.id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/CommonEntityDTO.java | src/main/java/org/ohdsi/webapi/service/dto/CommonEntityDTO.java | package org.ohdsi.webapi.service.dto;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
import org.ohdsi.webapi.user.dto.UserDTO;
import java.util.Date;
import org.ohdsi.webapi.CommonDTO;
/**
 * Base DTO carrying audit metadata (creator, modifier, timestamps) plus the
 * caller's access flags. The audit fields are read-only over JSON so clients
 * cannot overwrite them on submit.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public abstract class CommonEntityDTO implements CommonDTO, Serializable {

    @JsonProperty(access = JsonProperty.Access.READ_ONLY)
    private UserDTO createdBy;
    @JsonProperty(access = JsonProperty.Access.READ_ONLY)
    private UserDTO modifiedBy;
    @JsonProperty(access = JsonProperty.Access.READ_ONLY)
    private Date createdDate;
    @JsonProperty(access = JsonProperty.Access.READ_ONLY)
    private Date modifiedDate;
    // NOTE(review): presumably permissions computed for the requesting user — confirm.
    private boolean hasWriteAccess;
    private boolean hasReadAccess;

    public UserDTO getCreatedBy() {
        return this.createdBy;
    }

    public void setCreatedBy(UserDTO createdBy) {
        this.createdBy = createdBy;
    }

    public UserDTO getModifiedBy() {
        return this.modifiedBy;
    }

    public void setModifiedBy(UserDTO modifiedBy) {
        this.modifiedBy = modifiedBy;
    }

    public Date getCreatedDate() {
        return this.createdDate;
    }

    public void setCreatedDate(Date createdDate) {
        this.createdDate = createdDate;
    }

    public Date getModifiedDate() {
        return this.modifiedDate;
    }

    public void setModifiedDate(Date modifiedDate) {
        this.modifiedDate = modifiedDate;
    }

    public boolean isHasWriteAccess() {
        return this.hasWriteAccess;
    }

    public void setHasWriteAccess(boolean hasWriteAccess) {
        this.hasWriteAccess = hasWriteAccess;
    }

    public boolean isHasReadAccess() {
        return this.hasReadAccess;
    }

    public void setHasReadAccess(boolean hasReadAccess) {
        this.hasReadAccess = hasReadAccess;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/AnnotationDetailsDTO.java | src/main/java/org/ohdsi/webapi/service/dto/AnnotationDetailsDTO.java | package org.ohdsi.webapi.service.dto;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
 * Minimal annotation details: identifier, raw search data, and the concept id
 * the annotation refers to.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class AnnotationDetailsDTO {

    private Integer id;
    private String searchData;
    private Integer conceptId;

    public Integer getId() {
        return this.id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getSearchData() {
        return this.searchData;
    }

    public void setSearchData(String searchData) {
        this.searchData = searchData;
    }

    public Integer getConceptId() {
        return this.conceptId;
    }

    public void setConceptId(Integer conceptId) {
        this.conceptId = conceptId;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/dto/CheckResultDTO.java | src/main/java/org/ohdsi/webapi/service/dto/CheckResultDTO.java | package org.ohdsi.webapi.service.dto;
import org.ohdsi.circe.check.Warning;
import java.util.List;
/**
 * Result of a design check: the warnings it produced.
 */
public class CheckResultDTO {

    private List<Warning> warnings;

    public CheckResultDTO(List<Warning> warnings) {
        this.warnings = warnings;
    }

    public List<Warning> getWarnings() {
        return this.warnings;
    }

    public void setWarnings(List<Warning> warnings) {
        this.warnings = warnings;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/converters/BaseCommonDTOToEntityConverter.java | src/main/java/org/ohdsi/webapi/service/converters/BaseCommonDTOToEntityConverter.java | package org.ohdsi.webapi.service.converters;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
import org.ohdsi.webapi.model.CommonEntity;
import org.ohdsi.webapi.service.dto.CommonEntityDTO;
/**
 * Base converter from a {@link CommonEntityDTO} to its persistent entity.
 * Subclasses supply the concrete target via {@code createResultObject} and
 * copy type-specific fields in {@link #doConvert}.
 */
public abstract class BaseCommonDTOToEntityConverter<S extends CommonEntityDTO, T extends CommonEntity<? extends Number>>
        extends BaseConversionServiceAwareConverter<S, T> {

    /** Copies type-specific fields from {@code source} onto {@code target}. */
    protected abstract void doConvert(S source, T target);

    @Override
    public T convert(S s) {
        final T result = createResultObject(s);
        doConvert(s, result);
        return result;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/converters/IncidenceRateAnalysisToIRAnalysisDTOConverter.java | src/main/java/org/ohdsi/webapi/service/converters/IncidenceRateAnalysisToIRAnalysisDTOConverter.java | package org.ohdsi.webapi.service.converters;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysis;
import org.ohdsi.webapi.service.dto.IRAnalysisDTO;
import org.springframework.stereotype.Component;
/**
 * Converts an {@link IncidenceRateAnalysis} entity to the full
 * {@link IRAnalysisDTO}, adding the expression from the details entity on top
 * of the short-DTO fields.
 */
@Component
public class IncidenceRateAnalysisToIRAnalysisDTOConverter extends IncidenceRateAnalysisToIRAnalysisShortDTOConverter<IRAnalysisDTO> {

    @Override
    protected IRAnalysisDTO createResultObject() {
        return new IRAnalysisDTO();
    }

    @Override
    protected void doConvert(IncidenceRateAnalysis source, IRAnalysisDTO target) {
        super.doConvert(source, target);
        // The expression lives on the details entity, which may be absent.
        if (source.getDetails() == null) {
            target.setExpression(null);
        } else {
            target.setExpression(source.getDetails().getExpression());
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/converters/BaseCommonEntityToDTOConverter.java | src/main/java/org/ohdsi/webapi/service/converters/BaseCommonEntityToDTOConverter.java | package org.ohdsi.webapi.service.converters;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
import org.ohdsi.webapi.model.CommonEntity;
import org.ohdsi.webapi.service.dto.CommonEntityDTO;
import static org.ohdsi.webapi.util.ConversionUtils.convertMetadata;
/**
 * Base converter from a persistent entity to its {@link CommonEntityDTO}.
 * Converts the shared metadata first, then delegates type-specific fields
 * to {@link #doConvert}.
 */
public abstract class BaseCommonEntityToDTOConverter<S extends CommonEntity<? extends Number>, T extends CommonEntityDTO>
        extends BaseConversionServiceAwareConverter<S, T> {

    /** Copies type-specific fields from {@code source} onto {@code target}. */
    protected abstract void doConvert(S source, T target);

    @Override
    public T convert(S s) {
        final T dto = createResultObject(s);
        // Shared metadata first, then the subclass-specific fields.
        convertMetadata(conversionService, s, dto);
        doConvert(s, dto);
        return dto;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/converters/ConceptSetToConceptSetDTOConverter.java | src/main/java/org/ohdsi/webapi/service/converters/ConceptSetToConceptSetDTOConverter.java | package org.ohdsi.webapi.service.converters;
import org.ohdsi.webapi.conceptset.ConceptSet;
import org.ohdsi.webapi.service.dto.ConceptSetDTO;
import org.springframework.stereotype.Component;
/**
 * Converts a {@link ConceptSet} entity to its DTO representation.
 */
@Component
public class ConceptSetToConceptSetDTOConverter extends BaseCommonEntityExtToDTOExtConverter<ConceptSet, ConceptSetDTO> {

    @Override
    protected ConceptSetDTO createResultObject() {
        return new ConceptSetDTO();
    }

    @Override
    protected void doConvert(ConceptSet source, ConceptSetDTO target) {
        // Identifying fields only; shared metadata is handled by the base class.
        target.setId(source.getId());
        target.setName(source.getName());
        target.setDescription(source.getDescription());
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/converters/BaseCommonDTOExtToEntityExtConverter.java | src/main/java/org/ohdsi/webapi/service/converters/BaseCommonDTOExtToEntityExtConverter.java | package org.ohdsi.webapi.service.converters;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
import org.ohdsi.webapi.model.CommonEntityExt;
import org.ohdsi.webapi.service.dto.CommonEntityExtDTO;
/**
 * Base converter from an extended DTO (with tags) to its persistent entity.
 */
public abstract class BaseCommonDTOExtToEntityExtConverter<S extends CommonEntityExtDTO, T extends CommonEntityExt<? extends Number>>
        extends BaseConversionServiceAwareConverter<S, T> {

    /** Copies type-specific fields from {@code source} onto {@code target}. */
    protected abstract void doConvert(S source, T target);

    @Override
    public T convert(S s) {
        final T entity = createResultObject(s);
        doConvert(s, entity);
        return entity;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/converters/SourceRequestToSourceConverter.java | src/main/java/org/ohdsi/webapi/service/converters/SourceRequestToSourceConverter.java | package org.ohdsi.webapi.service.converters;
import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.KerberosAuthMechanism;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceRequest;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.stereotype.Component;
/**
 * Builds a {@link Source} entity from an incoming {@link SourceRequest},
 * self-registering with the shared conversion service at construction.
 */
@Component
public class SourceRequestToSourceConverter implements Converter<SourceRequest, Source> {

    public SourceRequestToSourceConverter(GenericConversionService conversionService) {
        conversionService.addConverter(this);
    }

    /**
     * Copies connection, credential and Kerberos settings from the request and
     * wires each daimon back to its owning source.
     *
     * @param request incoming source registration/update request
     * @return a new, unsaved {@link Source}
     */
    @Override
    public Source convert(SourceRequest request) {
        Source source = new Source();
        source.setSourceName(request.getName());
        source.setSourceConnection(request.getConnectionString());
        source.setUsername(request.getUsername());
        source.setPassword(request.getPassword());
        source.setSourceDialect(request.getDialect());
        source.setSourceKey(request.getKey());
        source.setDaimons(request.getDaimons());
        // Fix: the original dereferenced getDaimons() unconditionally, throwing
        // an NPE for requests that carry no daimon list.
        if (source.getDaimons() != null) {
            source.getDaimons().forEach(d -> d.setSource(source));
        }
        source.setKeyfileName(request.getKeyfileName());
        source.setKrbAdminServer(request.getKrbAdminServer());
        source.setKrbAuthMethod(KerberosAuthMechanism.getByName(request.getKrbAuthMethod()));
        // isCheckConnection() is a nullable Boolean; only override the default when set.
        if (request.isCheckConnection() != null) {
            source.setCheckConnection(request.isCheckConnection());
        }
        return source;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/converters/IncidenceRateAnalysisToIRAnalysisShortDTOConverter.java | src/main/java/org/ohdsi/webapi/service/converters/IncidenceRateAnalysisToIRAnalysisShortDTOConverter.java | package org.ohdsi.webapi.service.converters;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysis;
import org.ohdsi.webapi.service.dto.IRAnalysisShortDTO;
import org.springframework.stereotype.Component;
/**
 * Converts an {@link IncidenceRateAnalysis} entity to a short DTO (id, trimmed
 * name, description). Parameterized so subclasses can widen the target type.
 */
@Component
public class IncidenceRateAnalysisToIRAnalysisShortDTOConverter<T extends IRAnalysisShortDTO> extends BaseCommonEntityExtToDTOExtConverter<IncidenceRateAnalysis, T> {

    @Override
    protected T createResultObject() {
        // Unchecked cast: safe only because subclasses that narrow T override
        // this factory to return their own type.
        @SuppressWarnings("unchecked")
        T dto = (T) new IRAnalysisShortDTO();
        return dto;
    }

    @Override
    protected void doConvert(IncidenceRateAnalysis source, T target) {
        target.setId(source.getId());
        String trimmedName = StringUtils.trim(source.getName());
        target.setName(trimmedName);
        target.setDescription(source.getDescription());
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/converters/BaseCommonEntityExtToDTOExtConverter.java | src/main/java/org/ohdsi/webapi/service/converters/BaseCommonEntityExtToDTOExtConverter.java | package org.ohdsi.webapi.service.converters;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
import org.ohdsi.webapi.model.CommonEntityExt;
import org.ohdsi.webapi.service.dto.CommonEntityExtDTO;
import static org.ohdsi.webapi.util.ConversionUtils.convertMetadataExt;
/**
 * Base converter from an extended entity (with tags) to its DTO. Converts the
 * shared extended metadata, then hands off to {@link #doConvert}.
 */
public abstract class BaseCommonEntityExtToDTOExtConverter<S extends CommonEntityExt<? extends Number>, T extends CommonEntityExtDTO>
        extends BaseConversionServiceAwareConverter<S, T> {

    /** Copies type-specific fields from {@code source} onto {@code target}. */
    protected abstract void doConvert(S source, T target);

    @Override
    public T convert(S s) {
        final T dto = createResultObject(s);
        convertMetadataExt(conversionService, s, dto);
        doConvert(s, dto);
        return dto;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/converters/ConceptSetDTOToConceptSetConverter.java | src/main/java/org/ohdsi/webapi/service/converters/ConceptSetDTOToConceptSetConverter.java | package org.ohdsi.webapi.service.converters;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.webapi.conceptset.ConceptSet;
import org.ohdsi.webapi.service.dto.ConceptSetDTO;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.stereotype.Component;
/**
 * Converts a {@link ConceptSetDTO} back to a {@link ConceptSet} entity,
 * self-registering with the conversion service at construction.
 */
@Component
public class ConceptSetDTOToConceptSetConverter implements Converter<ConceptSetDTO, ConceptSet> {

    public ConceptSetDTOToConceptSetConverter(GenericConversionService conversionService) {
        conversionService.addConverter(this);
    }

    @Override
    public ConceptSet convert(ConceptSetDTO dto) {
        final ConceptSet entity = new ConceptSet();
        entity.setId(dto.getId());
        // Names are persisted trimmed.
        entity.setName(StringUtils.trim(dto.getName()));
        entity.setDescription(dto.getDescription());
        return entity;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/annotations/SearchDataTransformer.java | src/main/java/org/ohdsi/webapi/service/annotations/SearchDataTransformer.java | package org.ohdsi.webapi.service.annotations;
import org.apache.commons.lang3.StringUtils;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.stereotype.Service;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * Renders stored search-criteria JSON into a short human-readable summary,
 * e.g. {@code Search, "aspirin", Filtered By: "Domain: \"Drug\""}.
 */
@Service
public class SearchDataTransformer {

    private static final String FILTER_DATA = "filterData";
    private static final String TITLE = "title";
    private static final String KEY = "key";
    private static final String FILTER_SOURCE = "filterSource";
    private static final String FILTER_SOURCE_LABEL = "Filtered By";
    private static final String SEARCH_TEXT = "searchText";
    private static final String DEFAULT_FILTER_SOURCE = "Search";
    private static final String DELIMITER = ", ";
    private static final String ENTRY_FORMAT = "%s: \"%s\"";

    /**
     * Converts the search JSON into readable text. "filterData" may be either a
     * JSON object (with optional "searchText" and "filterColumns") or a JSON
     * array of {title, key} entries. Returns "" for null or empty input.
     *
     * Fixes over the previous version: an empty searchText no longer emits a
     * dangling {@code ""} entry (formatQuoted("") used to defeat the empty-part
     * skip), and an empty "filterColumns" array no longer emits
     * {@code Filtered By: ""} — matching the array branch, which already
     * skipped empty results.
     */
    public String convertJsonToReadableFormat(String jsonInput) {
        JSONObject searchObject = new JSONObject(Optional.ofNullable(jsonInput).orElse("{}"));
        if (searchObject.isEmpty()) {
            return "";
        }
        StringBuilder result = new StringBuilder();
        String filterSource = processFilterSource(searchObject);
        append(result, getDefaultOrActual(filterSource, DEFAULT_FILTER_SOURCE));
        JSONObject filterDataObject = searchObject.optJSONObject(FILTER_DATA);
        JSONArray filterDataArray = searchObject.optJSONArray(FILTER_DATA);
        if (filterDataObject != null) {
            String searchText = processSearchText(filterDataObject);
            if (!searchText.isEmpty()) {
                appendCommaSeparated(result, formatQuoted(searchText));
            }
            JSONArray filterColumns = filterDataObject.optJSONArray("filterColumns");
            if (filterColumns != null) {
                String extractedData = formatKeyValuePairs(filterColumns);
                if (!extractedData.isEmpty()) {
                    appendCommaSeparated(result, FILTER_SOURCE_LABEL + ": \"" + extractedData + "\"");
                }
            }
        } else if (filterDataArray != null) {
            String extractedData = formatKeyValuePairs(filterDataArray);
            if (!extractedData.isEmpty()) {
                appendCommaSeparated(result, FILTER_SOURCE_LABEL + ": \"" + extractedData + "\"");
            }
        }
        return result.toString().trim();
    }

    /** "filterSource" field, or "" when absent. */
    private String processFilterSource(JSONObject jsonObject) {
        return jsonObject.optString(FILTER_SOURCE, "");
    }

    /** "searchText" field, or "" when absent. */
    private String processSearchText(JSONObject filterData) {
        return filterData.optString(SEARCH_TEXT, "");
    }

    /** Formats each {title, key} entry as {@code title: "key"}, comma-joined. */
    private String formatKeyValuePairs(JSONArray filterDataArray) {
        return IntStream.range(0, filterDataArray.length())
                .mapToObj(index -> formatEntry(filterDataArray.getJSONObject(index)))
                .collect(Collectors.joining(DELIMITER));
    }

    private String formatEntry(JSONObject item) {
        String title = optString(item, TITLE);
        // Keys may arrive wrapped in literal quotes; strip one surrounding pair.
        String key = StringUtils.unwrap(optString(item, KEY), '"');
        return String.format(ENTRY_FORMAT, title, key);
    }

    /** Appends {@code part} (delimiter-separated) unless it is empty. */
    private void appendCommaSeparated(StringBuilder builder, String part) {
        if (!part.isEmpty()) {
            append(builder, part);
        }
    }

    /** Appends {@code part}, prefixing the delimiter when the builder already has content. */
    private void append(StringBuilder builder, String part) {
        if (builder.length() > 0) {
            builder.append(DELIMITER);
        }
        builder.append(part);
    }

    private String optString(JSONObject item, String key) {
        return item.optString(key, "");
    }

    private String getDefaultOrActual(String actual, String defaultVal) {
        return actual.isEmpty() ? defaultVal : actual;
    }

    private String formatQuoted(String text) {
        return String.format("\"%s\"", text);
    }
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/cscompare/ExpressionType.java | src/main/java/org/ohdsi/webapi/service/cscompare/ExpressionType.java | package org.ohdsi.webapi.service.cscompare;
/**
 * Granularity for comparing two concept set expressions
 * (see ConceptSetCompareService).
 */
public enum ExpressionType {
// Match concepts by name, code and vocabulary id via a temp-table lookup.
CONCEPT_NAME_CODE_AND_VOCABULARY_ID_ONLY,
// Compare using the fully resolved concept set expression query.
FULL
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/cscompare/ExpressionFileUtils.java | src/main/java/org/ohdsi/webapi/service/cscompare/ExpressionFileUtils.java | package org.ohdsi.webapi.service.cscompare;
import org.ohdsi.circe.vocabulary.Concept;
import org.ohdsi.circe.vocabulary.ConceptSetExpression;
import org.ohdsi.webapi.conceptset.ConceptSetComparison;
import java.util.*;
import java.util.stream.Collector;
import java.util.stream.Collectors;
/**
 * Static helpers for comparing concept set expressions. Concepts are keyed by
 * "conceptCode:vocabularyId" throughout.
 */
public class ExpressionFileUtils {

    private static final String CODE_AND_VOCABID_KEY = "%s:%s";

    // NOTE(review): Collectors.toMap throws IllegalStateException on duplicate
    // code:vocabulary keys — assumes expression items are unique per key; confirm.
    private static final Collector<ConceptSetExpression.ConceptSetItem, ?, Map<String, Concept>> CONCEPT_MAP_COLLECTOR =
            Collectors.toMap(ExpressionFileUtils::getKey, item -> item.concept);
    private static final Collector<ConceptSetExpression.ConceptSetItem, ?, Map<String, String>> NAMES_MAP_COLLECTOR =
            Collectors.toMap(ExpressionFileUtils::getKey, item -> item.concept.conceptName);

    /** Utility holder; not meant to be instantiated. */
    private ExpressionFileUtils() {
    }

    /** Key for a concept set item: "conceptCode:vocabularyId". */
    public static String getKey(final ConceptSetExpression.ConceptSetItem item) {
        return String.format(CODE_AND_VOCABID_KEY, item.concept.conceptCode, item.concept.vocabularyId);
    }

    /** Key for a comparison row: "conceptCode:vocabularyId". */
    public static String getKey(final ConceptSetComparison item) {
        return String.format(CODE_AND_VOCABID_KEY, item.conceptCode, item.vocabularyId);
    }

    /**
     * Merges two "not found in DB" concept maps into comparison rows, flagging
     * whether each concept came from input 1 only, input 2 only, or both.
     */
    public static Collection<ConceptSetComparison> combine(final Map<String, Concept> input1ex,
                                                           final Map<String, Concept> input2ex) {
        final Collection<ConceptSetComparison> outValues = new ArrayList<>();
        // Combine both maps so a concept present in both inputs yields one row.
        final Map<String, Concept> combinedMap = new HashMap<>(input1ex);
        combinedMap.putAll(input2ex);
        combinedMap.forEach((key, value) -> {
            final ConceptSetComparison out = new ConceptSetComparison();
            final boolean isInIntersection = input1ex.containsKey(key) && input2ex.containsKey(key);
            final boolean isIn1Only = !isInIntersection && input1ex.containsKey(key);
            final boolean isIn2Only = !isInIntersection && input2ex.containsKey(key);
            out.conceptIn1Only = isIn1Only ? 1L : 0;
            out.conceptIn2Only = isIn2Only ? 1L : 0;
            out.conceptIn1And2 = isInIntersection ? 1L : 0;
            out.conceptName = value.conceptName;
            out.conceptCode = value.conceptCode;
            out.vocabularyId = value.vocabularyId;
            outValues.add(out);
        });
        return outValues;
    }

    /** Items of {@code in1} whose code/vocabulary pair did not appear in the DB results. */
    public static Map<String, Concept> toExclusionMap(final ConceptSetExpression.ConceptSetItem[] in1, final Collection<ConceptSetComparison> fromDb) {
        return Arrays.stream(in1).filter(item ->
                fromDb.stream().noneMatch(out -> out.conceptCode.equals(item.concept.conceptCode) && out.vocabularyId.equals(item.concept.vocabularyId))
        ).collect(CONCEPT_MAP_COLLECTOR);
    }

    /** Concept names from both inputs, keyed by code:vocabulary; input 2 wins on clashes. */
    public static Map<String, String> toNamesMap(final ConceptSetExpression.ConceptSetItem[] in1,
                                                 final ConceptSetExpression.ConceptSetItem[] in2) {
        final Map<String, String> names1 = Arrays.stream(in1).collect(NAMES_MAP_COLLECTOR);
        final Map<String, String> names2 = Arrays.stream(in2).collect(NAMES_MAP_COLLECTOR);
        final Map<String, String> namesCombined = new HashMap<>(names1);
        namesCombined.putAll(names2);
        return namesCombined;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/cscompare/CompareArbitraryDto.java | src/main/java/org/ohdsi/webapi/service/cscompare/CompareArbitraryDto.java | package org.ohdsi.webapi.service.cscompare;
import org.ohdsi.circe.vocabulary.ConceptSetExpression;
/**
 * Payload for comparing two arbitrary concept set expressions.
 * {@code types[i]} selects the comparison granularity for
 * {@code compareTargets[i]}; exactly two targets are expected
 * (enforced by ConceptSetCompareService).
 */
public class CompareArbitraryDto {
// Comparison granularity per target expression.
public ExpressionType[] types;
// The concept set expressions to compare (must contain exactly two).
public ConceptSetExpression[] compareTargets;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetCompareService.java | src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetCompareService.java | package org.ohdsi.webapi.service.cscompare;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.circe.vocabulary.ConceptSetExpression;
import org.ohdsi.circe.vocabulary.ConceptSetExpressionQueryBuilder;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.conceptset.ConceptSetComparison;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionCallback;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.Collection;
import java.util.UUID;
import java.util.function.Function;
/**
 * Compares two concept set expressions against a source's vocabulary,
 * reporting for each concept whether it appears in the first set only, the
 * second set only, or both. Comparison SQL is assembled from classpath
 * templates and executed in a single transaction on the target source.
 */
@Service
public class ConceptSetCompareService extends AbstractDaoService {

    // SQL templates loaded once from the classpath.
    private static final String TEMP_TABLE_CREATE_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/vocabulary/sql/createConceptSetTempTable.sql");
    private static final String TEMP_TABLE_FILL_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/vocabulary/sql/fillConceptSetTempTable.sql");
    private final String CONCEPT_SET_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/vocabulary/sql/getConceptByCodeAndVocabulary.sql");
    private final String COMPARE_STATEMENT = ResourceHelper.GetResourceAsString("/resources/vocabulary/sql/compareConceptSets.sql");

    // Temp table names get a UUID-derived suffix (presumably to avoid
    // collisions between concurrent comparisons — confirm).
    private static final String TEMP_TABLE_NAME_TEMPLATE = "cs_code_%s";
    private static final int DEFAULT_BATCH_SIZE = 100;

    // Maps one row of the comparison query result onto a ConceptSetComparison bean.
    public static final RowMapper<ConceptSetComparison> CONCEPT_SET_COMPARISON_ROW_MAPPER = (rs, rowNum) -> {
        ConceptSetComparison csc = new ConceptSetComparison();
        csc.conceptId = rs.getLong("concept_id");
        csc.conceptIn1Only = rs.getLong("concept_in_1_only");
        csc.conceptIn2Only = rs.getLong("concept_in_2_only");
        csc.conceptIn1And2 = rs.getLong("concept_in_both_1_and_2");
        csc.conceptName = rs.getString("concept_name");
        csc.standardConcept = rs.getString("standard_concept");
        csc.invalidReason = rs.getString("invalid_reason");
        csc.conceptCode = rs.getString("concept_code");
        csc.domainId = rs.getString("domain_id");
        csc.vocabularyId = rs.getString("vocabulary_id");
        csc.validStartDate = rs.getDate("valid_start_date");
        csc.validEndDate = rs.getDate("valid_end_date");
        csc.conceptClassId = rs.getString("concept_class_id");
        return csc;
    };

    /**
     * Compares exactly two concept set expressions on the given source.
     *
     * @param sourceKey key of the CDM source whose vocabulary is used
     * @param dto       the two expressions plus a comparison type for each
     * @return one row per concept stating membership in set 1, set 2, or both
     * @throws Exception if the request does not contain exactly two
     *                   expressions, or if the comparison SQL fails
     */
    public Collection<ConceptSetComparison> compareConceptSets(final String sourceKey,
    final CompareArbitraryDto dto) throws Exception {
        final ConceptSetExpression[] csExpressionList = dto.compareTargets;
        if (csExpressionList.length != 2) {
            throw new Exception("You must specify two concept set expressions in order to use this method.");
        }
        final Source source = getSourceRepository().findBySourceKey(sourceKey);
        final String vocabSchema = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
        // Runs inside one transaction via executeInTransaction; presumably this
        // keeps session-scoped temp tables created by getQuery() visible to the
        // comparison statement — confirm.
        final Function<JdbcTemplate, TransactionCallback<Collection<ConceptSetComparison>>> callbackFunction =
        jdbcTemplate -> (TransactionCallback<Collection<ConceptSetComparison>>) transactionStatus -> {
            try {
                final String csQuery1 = getQuery(csExpressionList[0], dto.types[0], source, jdbcTemplate);
                final String csQuery2 = getQuery(csExpressionList[1], dto.types[1], source, jdbcTemplate);
                // Insert the queries into the overall comparison script
                String sql = SqlRender.renderSql(COMPARE_STATEMENT, new String[]{"cs1_expression", "cs2_expression"}, new String[]{csQuery1, csQuery2});
                sql = SqlRender.renderSql(sql, new String[]{"vocabulary_database_schema"}, new String[]{vocabSchema});
                sql = SqlTranslate.translateSql(sql, source.getSourceDialect());
                // Execute the query
                return jdbcTemplate.query(sql, CONCEPT_SET_COMPARISON_ROW_MAPPER);
            } catch (Exception ex) {
                log.error("An error occurred during the comparing of concept sets", ex);
                throw ex;
            }
        };
        return executeInTransaction(source, callbackFunction);
    }

    /**
     * Produces the SQL resolving one expression to a concept list. For
     * name/code/vocabulary comparisons the expression's items are loaded into a
     * temp table and matched by code + vocabulary; otherwise the full circe
     * expression query is built.
     */
    private String getQuery(final ConceptSetExpression csExpression, final ExpressionType type,
    final Source source, final JdbcTemplate jdbcTemplate) {
        if (type == ExpressionType.CONCEPT_NAME_CODE_AND_VOCABULARY_ID_ONLY) {
            final String tempTableName = createTempTable(source, jdbcTemplate);
            fillTable(tempTableName, csExpression, jdbcTemplate);
            return StringUtils.replace(CONCEPT_SET_QUERY_TEMPLATE, "@temp_table", tempTableName);
        } else {
            final ConceptSetExpressionQueryBuilder builder = new ConceptSetExpressionQueryBuilder();
            return builder.buildExpressionQuery(csExpression);
        }
    }

    /** Creates a uniquely named temp table for one expression's code/vocabulary pairs. */
    private String createTempTable(final Source source, final JdbcTemplate jdbcTemplate) {
        final String tableName = String.format(TEMP_TABLE_NAME_TEMPLATE,
        StringUtils.replace(UUID.randomUUID().toString(), "-", ""));
        String createStatement = SqlRender.renderSql(TEMP_TABLE_CREATE_TEMPLATE, new String[]{"temp_table"}, new String[]{tableName});
        createStatement = SqlTranslate.translateSql(createStatement, source.getSourceDialect());
        jdbcTemplate.execute(createStatement);
        return tableName;
    }

    /** Batch-inserts the expression's concept code / vocabulary id pairs into the temp table. */
    private void fillTable(final String tableName, final ConceptSetExpression expression, final JdbcTemplate jdbcTemplate) {
        if (expression.items.length > 0) {
            final String insertStatement = SqlRender.renderSql(TEMP_TABLE_FILL_TEMPLATE, new String[]{"temp_table"}, new String[]{tableName});
            jdbcTemplate.batchUpdate(insertStatement,
            Arrays.asList(expression.items),
            DEFAULT_BATCH_SIZE,
            (PreparedStatement ps, ConceptSetExpression.ConceptSetItem item) -> {
                ps.setString(1, item.concept.conceptCode);
                ps.setString(2, item.concept.vocabularyId);
            });
        }
    }
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/vocabulary/ConceptSetStrategy.java | src/main/java/org/ohdsi/webapi/service/vocabulary/ConceptSetStrategy.java | package org.ohdsi.webapi.service.vocabulary;
import org.ohdsi.circe.vocabulary.ConceptSetExpression;
import org.ohdsi.circe.vocabulary.ConceptSetExpressionQueryBuilder;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import java.util.Objects;
import java.util.function.Function;
public class ConceptSetStrategy implements StatementPrepareStrategy {
private ConceptSetExpression expression;
    /**
     * @param expression the concept set expression that will be rendered into vocabulary SQL
     */
    public ConceptSetStrategy(ConceptSetExpression expression) {
        this.expression = expression;
    }
@Override
public PreparedStatementRenderer prepareStatement(Source source, Function<String, String> queryModifier) {
ConceptSetExpressionQueryBuilder builder = new ConceptSetExpressionQueryBuilder();
String sql = builder.buildExpressionQuery(expression);
if (Objects.nonNull(queryModifier)) {
sql = queryModifier.apply(sql);
}
String tqName = "vocabulary_database_schema";
String vocabularyTableQualifierName = "vocabularyTableQualifier";
String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
String vocabularyTableQualifierValue = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary);
String[] tableQualifierNames = {tqName, vocabularyTableQualifierName};
String[] tableQualifierValues = {tqValue, vocabularyTableQualifierValue};
sql = SqlRender.renderSql(sql, tableQualifierNames, tableQualifierValues);
return new PreparedStatementRenderer(source, sql, tableQualifierNames, tableQualifierValues, null);
}
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/service/vocabulary/StatementPrepareStrategy.java | src/main/java/org/ohdsi/webapi/service/vocabulary/StatementPrepareStrategy.java | package org.ohdsi.webapi.service.vocabulary;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import java.util.function.Function;
public interface StatementPrepareStrategy {
    /**
     * Prepares a statement renderer for the given source. {@code queryModifier} may be
     * null; when present it is applied to the generated SQL before final rendering.
     */
    PreparedStatementRenderer prepareStatement(Source source, Function<String, String> queryModifier);
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/cohort/CohortEntity.java | src/main/java/org/ohdsi/webapi/cohort/CohortEntity.java | package org.ohdsi.webapi.cohort;
import java.io.Serializable;
import java.util.Date;
import java.util.Objects;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.IdClass;
import javax.persistence.Table;
/**
 * JPA entity for one row of the generated {@code cohort} results table: a single
 * subject's membership interval in a cohort. The composite primary key is
 * (cohortDefinitionId, subjectId).
 * NOTE(review): {@code @IdClass(CohortEntity.class)} uses the entity itself as its
 * id class — unusual but legal since the class is Serializable and equals/hashCode
 * cover exactly the id fields; confirm this matches the intended key mapping.
 */
@Entity(name = "CohortEntity")
@Table(name = "cohort")
@IdClass(CohortEntity.class)
public class CohortEntity implements Serializable {

    private static final long serialVersionUID = 7736489323230370316L;

    // Composite key part 1: the cohort definition this row belongs to.
    @Id
    @Column(name = "cohort_definition_id")
    private Long cohortDefinitionId;

    // Composite key part 2: the person in the cohort.
    @Id
    @Column(name = "subject_id")
    private Long subjectId;

    @Column(name = "cohort_start_date")
    private Date cohortStartDate;

    @Column(name = "cohort_end_date")
    private Date cohortEndDate;

    public Long getCohortDefinitionId() {
        return cohortDefinitionId;
    }

    public void setCohortDefinitionId(Long cohortDefinitionId) {
        this.cohortDefinitionId = cohortDefinitionId;
    }

    public Long getSubjectId() {
        return subjectId;
    }

    public void setSubjectId(Long subjectId) {
        this.subjectId = subjectId;
    }

    public Date getCohortStartDate() {
        return cohortStartDate;
    }

    public void setCohortStartDate(Date cohortStartDate) {
        this.cohortStartDate = cohortStartDate;
    }

    public Date getCohortEndDate() {
        return cohortEndDate;
    }

    public void setCohortEndDate(Date cohortEndDate) {
        this.cohortEndDate = cohortEndDate;
    }

    /**
     * Hash over the composite key only. {@code Objects.hash} produces the exact same
     * 31-based value as the previous hand-rolled formula, so persisted/keyed behavior
     * is unchanged.
     */
    @Override
    public int hashCode() {
        return Objects.hash(cohortDefinitionId, subjectId);
    }

    /** Equality is based solely on the composite key (definition id, subject id). */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        CohortEntity other = (CohortEntity) obj;
        return Objects.equals(cohortDefinitionId, other.cohortDefinitionId)
                && Objects.equals(subjectId, other.subjectId);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/cohort/CohortRepository.java | src/main/java/org/ohdsi/webapi/cohort/CohortRepository.java | package org.ohdsi.webapi.cohort;
import java.util.List;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
/**
 * Read access to generated cohort membership rows.
 * NOTE(review): the id type {@code Long} does not match the entity's composite
 * {@code @IdClass}; the CRUD-by-id methods are presumably unused — confirm before
 * relying on them.
 */
public interface CohortRepository extends CrudRepository<CohortEntity, Long> {

    /**
     * Returns every cohort membership row generated for the given cohort definition.
     * BUGFIX: JPQL resolves entity property names, not column names, so the query must
     * reference {@code cohortDefinitionId} rather than {@code cohort_definition_id}.
     */
    @Query("from CohortEntity where cohortDefinitionId = ?1")
    List<CohortEntity> getAllCohortsForId(Long id);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/FileWriter.java | src/main/java/org/ohdsi/webapi/shiny/FileWriter.java | package org.ohdsi.webapi.shiny;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.ws.rs.InternalServerErrorException;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.Consumer;
/**
 * Thin helper around file output used by the Shiny packaging services. All failures
 * are logged and rethrown as {@link InternalServerErrorException}, matching the
 * error-handling style of the surrounding services.
 */
@Component
public class FileWriter {

    private static final Logger LOG = LoggerFactory.getLogger(FileWriter.class);

    private final ObjectMapper objectMapper = new ObjectMapper();

    /**
     * Opens {@code path} for writing and hands a {@link PrintWriter} to the caller.
     *
     * @param path   file to create or overwrite
     * @param writer callback that produces the file contents
     * @return the path that was written
     * @throws InternalServerErrorException if the file cannot be opened or a write fails
     */
    public Path writeTextFile(Path path, Consumer<PrintWriter> writer) {
        try (OutputStream out = Files.newOutputStream(path); PrintWriter printWriter = new PrintWriter(out)) {
            writer.accept(printWriter);
            // BUGFIX: PrintWriter swallows IOExceptions and only sets an internal error
            // flag, so without this check a failed write was reported as success.
            printWriter.flush();
            if (printWriter.checkError()) {
                LOG.error("Failed to write file {}", path);
                throw new InternalServerErrorException();
            }
            return path;
        } catch (IOException e) {
            LOG.error("Failed to write file", e);
            throw new InternalServerErrorException();
        }
    }

    /**
     * Serializes {@code object} as JSON into a newly created file named
     * {@code filename} under {@code parentDir}.
     *
     * @return the path of the created file
     * @throws InternalServerErrorException if the file exists or serialization fails
     */
    public Path writeObjectAsJsonFile(Path parentDir, Object object, String filename) {
        try {
            Path file = Files.createFile(parentDir.resolve(filename));
            try (OutputStream out = Files.newOutputStream(file)) {
                objectMapper.writeValue(out, object);
            }
            return file;
        } catch (IOException e) {
            LOG.error("Failed to package Shiny application", e);
            throw new InternalServerErrorException();
        }
    }

    /**
     * Writes an already-built JSON tree to {@code path}, overwriting it.
     *
     * @throws InternalServerErrorException if writing fails
     */
    public void writeJsonNodeToFile(JsonNode object, Path path) {
        try {
            objectMapper.writeValue(path.toFile(), object);
        } catch (IOException e) {
            LOG.error("Failed to write json file", e);
            throw new InternalServerErrorException();
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/CohortCharacterizationShinyPackagingService.java | src/main/java/org/ohdsi/webapi/shiny/CohortCharacterizationShinyPackagingService.java | package org.ohdsi.webapi.shiny;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Iterables;
import com.odysseusinc.arachne.commons.api.v1.dto.CommonAnalysisType;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.QuoteMode;
import org.apache.commons.lang3.tuple.Pair;
import org.ohdsi.analysis.CohortMetadata;
import org.ohdsi.analysis.WithId;
import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterization;
import org.ohdsi.webapi.cohortcharacterization.CcService;
import org.ohdsi.webapi.cohortcharacterization.domain.CcGenerationEntity;
import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity;
import org.ohdsi.webapi.cohortcharacterization.dto.ExecutionResultRequest;
import org.ohdsi.webapi.cohortcharacterization.dto.GenerationResults;
import org.ohdsi.webapi.cohortcharacterization.report.ExportItem;
import org.ohdsi.webapi.cohortcharacterization.report.Report;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.service.CDMResultsService;
import org.ohdsi.webapi.service.ShinyService;
import org.ohdsi.webapi.shiny.summary.DataSourceSummaryConverter;
import org.ohdsi.webapi.source.SourceRepository;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.InternalServerErrorException;
import java.io.IOException;
import java.io.StringWriter;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Packages a cohort characterization generation as a Shiny application: exports the
 * generation's reports as CSV files and fills in the app's descriptive properties
 * (Atlas link, author, cohorts, counts, generation id, ...).
 */
@Service
@ConditionalOnBean(ShinyService.class)
public class CohortCharacterizationShinyPackagingService extends CommonShinyPackagingService implements ShinyPackagingService {

    private static final Logger LOG = LoggerFactory.getLogger(CohortCharacterizationShinyPackagingService.class);

    // Threshold passed as thresholdValuePct when fetching results; presumably a
    // prevalence percentage cut-off — TODO confirm against CcService.findResult.
    private static final Float DEFAULT_THRESHOLD_VALUE = 0.01f;
    private static final String SHINY_COHORT_CHARACTERIZATIONS_APP_TEMPLATE_FILE_PATH = "/shiny/shiny-cohortCharacterizations.zip";
    private static final String APP_TITLE_FORMAT = "Characterization_%s_gv%sx_%s";

    private final CcService ccService;
    private final CohortCharacterizationAnalysisHeaderToFieldMapper cohortCharacterizationAnalysisHeaderToFieldMapper;

    @Autowired
    public CohortCharacterizationShinyPackagingService(
            @Value("${shiny.atlas.url}") String atlasUrl,
            @Value("${shiny.repo.link}") String repoLink,
            FileWriter fileWriter,
            ManifestUtils manifestUtils,
            ObjectMapper objectMapper,
            CcService ccService,
            CohortCharacterizationAnalysisHeaderToFieldMapper cohortCharacterizationAnalysisHeaderToFieldMapper,
            SourceRepository sourceRepository,
            CDMResultsService cdmResultsService,
            DataSourceSummaryConverter dataSourceSummaryConverter) {
        super(atlasUrl, repoLink, fileWriter, manifestUtils, objectMapper, sourceRepository, cdmResultsService, dataSourceSummaryConverter);
        this.ccService = ccService;
        this.cohortCharacterizationAnalysisHeaderToFieldMapper = cohortCharacterizationAnalysisHeaderToFieldMapper;
    }

    @Override
    public CommonAnalysisType getType() {
        return CommonAnalysisType.COHORT_CHARACTERIZATION;
    }

    @Override
    public String getAppTemplateFilePath() {
        return SHINY_COHORT_CHARACTERIZATIONS_APP_TEMPLATE_FILE_PATH;
    }

    /**
     * Exports the generation's reports as CSV files and sets the app's descriptive
     * properties on the supplied consumers.
     *
     * @throws javax.ws.rs.NotFoundException (via ExceptionUtils) when no results exist
     */
    @Override
    @Transactional
    public void populateAppData(Integer generationId, String sourceKey, ShinyAppDataConsumers dataConsumers) {
        CohortCharacterization cohortCharacterization = ccService.findDesignByGenerationId(Long.valueOf(generationId));
        CohortCharacterizationEntity cohortCharacterizationEntity = ccService.findById(cohortCharacterization.getId());
        GenerationResults generationResults = fetchGenerationResults(generationId, cohortCharacterization);
        ExceptionUtils.throwNotFoundExceptionIfNull(generationResults, String.format("There are no analysis generation results with generationId = %d.", generationId));
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ATLAS_LINK.getValue(), String.format("%s/#/cc/characterizations/%s", atlasUrl, cohortCharacterization.getId()));
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ANALYSIS_NAME.getValue(), cohortCharacterization.getName());
        // One CSV file per report; the consumer receives (filename, csv contents).
        generationResults.getReports()
                .stream()
                .map(this::convertReportToCSV)
                .forEach(csvDataByFilename -> dataConsumers.getTextFiles().accept(csvDataByFilename.getKey(), csvDataByFilename.getValue()));
        CcGenerationEntity generationEntity = ccService.findGenerationById(Long.valueOf(generationId));
        Long resultsTotalCount = ccService.getCCResultsTotalCount(Long.valueOf(generationId));
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_AUTHOR.getValue(), getAuthor(cohortCharacterizationEntity));
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ASSET_ID.getValue(), cohortCharacterization.getId().toString());
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_GENERATED_DATE.getValue(), getGenerationStartTime(generationEntity));
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_RECORD_COUNT.getValue(), Long.toString(resultsTotalCount));
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_AUTHOR_NOTES.getValue(), getDescription(cohortCharacterizationEntity));
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_REFERENCED_COHORTS.getValue(), getReferencedCohorts(cohortCharacterizationEntity));
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_VERSION_ID.getValue(), Integer.toString(generationId));
        dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_GENERATION_ID.getValue(), Integer.toString(generationId));
    }

    /** Semicolon-separated names of the cohorts referenced by the characterization. */
    private String getReferencedCohorts(CohortCharacterizationEntity cohortCharacterizationEntity) {
        if (cohortCharacterizationEntity != null) {
            return cohortCharacterizationEntity.getCohortDefinitions().stream().map(CohortDefinition::getName).collect(Collectors.joining("; "));
        }
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }

    /** Login of the creating user, or N/A when unknown. */
    private String getAuthor(CohortCharacterizationEntity cohortCharacterizationEntity) {
        if (cohortCharacterizationEntity.getCreatedBy() != null) {
            return cohortCharacterizationEntity.getCreatedBy().getLogin();
        }
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }

    /** Formatted start time of the generation, or N/A when unknown. */
    private String getGenerationStartTime(CcGenerationEntity generation) {
        if (generation != null) {
            return dateToString(generation.getStartTime());
        }
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }

    /** Description with line breaks escaped, or N/A when absent. */
    private String getDescription(CohortCharacterizationEntity cohortCharacterizationEntity) {
        if (cohortCharacterizationEntity != null && cohortCharacterizationEntity.getDescription() != null) {
            return escapeLineBreaks(cohortCharacterizationEntity.getDescription());
        }
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }

    /**
     * Converts a report to CSV.
     * Pair.left == CSV filename, Pair.right == CSV contents.
     * The report is expected to carry exactly one header row ({@code getOnlyElement}).
     */
    private Pair<String, String> convertReportToCSV(Report report) {
        boolean isComparativeAnalysis = report.isComparative;
        String analysisName = report.analysisName;
        String fileNameFormat = "Export %s(%s).csv";
        String fileName = String.format(fileNameFormat, isComparativeAnalysis ? "comparison " : "", analysisName);
        List<ExportItem> exportItems = report.items.stream()
                .sorted()
                .collect(Collectors.toList());
        String[] header = Iterables.getOnlyElement(report.header);
        String outCsv = prepareCsv(header, exportItems);
        return Pair.of(fileName, outCsv);
    }

    /**
     * Renders the export items into CSV text under the given headers. Each header is
     * mapped to a Java field name and the value is read reflectively from the item;
     * headers with no mapping or no matching field produce empty cells.
     */
    private String prepareCsv(String[] headers, List<ExportItem> exportItems) {
        try (StringWriter stringWriter = new StringWriter();
             CSVPrinter csvPrinter = new CSVPrinter(stringWriter,
                     CSVFormat.Builder
                             .create()
                             .setQuoteMode(QuoteMode.NON_NUMERIC)
                             .setHeader(headers)
                             .build())) {
            for (ExportItem<?> item : exportItems) {
                List<String> record = new ArrayList<>();
                for (String header : headers) {
                    String fieldName = cohortCharacterizationAnalysisHeaderToFieldMapper.getHeaderFieldMapping().get(header); // get the corresponding Java field name
                    try {
                        if (fieldName == null) {
                            // BUGFIX: previously nothing was appended for an unmapped
                            // header, shifting every subsequent column of the row.
                            record.add(null);
                        } else {
                            Field field = findFieldInClassHierarchy(item.getClass(), fieldName);
                            if (field != null) {
                                field.setAccessible(true);
                                record.add(String.valueOf(field.get(item)));
                            } else {
                                record.add(null);
                            }
                        }
                    } catch (IllegalAccessException ex) {
                        LOG.error("Error occurred while accessing field value", ex);
                        record.add("");
                    }
                }
                csvPrinter.printRecord(record);
            }
            return stringWriter.toString();
        } catch (IOException e) {
            LOG.error("Failed to create a CSV file with Cohort Characterization analysis details", e);
            throw new InternalServerErrorException();
        }
    }

    /** Walks up the class hierarchy looking for a declared field; null when absent. */
    private Field findFieldInClassHierarchy(Class<?> clazz, String fieldName) {
        if (clazz == null) {
            return null;
        }
        Field field;
        try {
            field = clazz.getDeclaredField(fieldName);
        } catch (NoSuchFieldException ex) {
            field = findFieldInClassHierarchy(clazz.getSuperclass(), fieldName);
        }
        return field;
    }

    /**
     * Builds the result request covering every cohort, analysis and domain of the
     * design (empty results included) and fetches the generation results.
     */
    private GenerationResults fetchGenerationResults(Integer generationId, CohortCharacterization cohortCharacterization) {
        ExecutionResultRequest executionResultRequest = new ExecutionResultRequest();
        List<Integer> cohortIds = cohortCharacterization.getCohorts()
                .stream()
                .map(CohortMetadata::getId)
                .collect(Collectors.toList());
        List<Integer> analysisIds = cohortCharacterization.getFeatureAnalyses()
                .stream()
                .map(WithId::getId)
                .map(Number::intValue)
                .collect(Collectors.toList());
        List<String> domainIds = cohortCharacterization.getFeatureAnalyses()
                .stream()
                .map(featureAnalysis -> featureAnalysis.getDomain().getName().toUpperCase())
                .distinct()
                .collect(Collectors.toList());
        executionResultRequest.setAnalysisIds(analysisIds);
        executionResultRequest.setCohortIds(cohortIds);
        executionResultRequest.setDomainIds(domainIds);
        executionResultRequest.setShowEmptyResults(Boolean.TRUE);
        executionResultRequest.setThresholdValuePct(DEFAULT_THRESHOLD_VALUE);
        return ccService.findResult(Long.valueOf(generationId), executionResultRequest);
    }

    /** Name/title/description metadata for the packaged app. */
    @Override
    @Transactional
    public ApplicationBrief getBrief(Integer generationId, String sourceKey) {
        CohortCharacterization cohortCharacterization = ccService.findDesignByGenerationId(Long.valueOf(generationId));
        // Consistency with populateAppData: fail with 404 instead of NPE when missing.
        ExceptionUtils.throwNotFoundExceptionIfNull(cohortCharacterization, String.format("There is no characterization design for generationId = %d.", generationId));
        CohortCharacterizationEntity cohortCharacterizationEntity = ccService.findById(cohortCharacterization.getId());
        ApplicationBrief applicationBrief = new ApplicationBrief();
        applicationBrief.setName(String.format("%s_%s_%s", CommonAnalysisType.COHORT_CHARACTERIZATION.getCode(), generationId, sourceKey));
        applicationBrief.setTitle(prepareAppTitle(cohortCharacterization.getId(), generationId, sourceKey));
        applicationBrief.setDescription(cohortCharacterizationEntity.getDescription());
        return applicationBrief;
    }

    private String prepareAppTitle(Long studyAssetId, Integer generationId, String sourceKey) {
        return String.format(APP_TITLE_FORMAT, studyAssetId, generationId, sourceKey);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/PackagingStrategy.java | src/main/java/org/ohdsi/webapi/shiny/PackagingStrategy.java | package org.ohdsi.webapi.shiny;
import java.nio.file.Path;
import java.util.function.Function;
/**
 * A transformation applied to a staged Shiny application directory — e.g. zipping
 * it — mapping the input path to the path of the packaged artifact.
 */
public interface PackagingStrategy extends Function<Path, Path> {
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/CohortCountsShinyPackagingService.java | src/main/java/org/ohdsi/webapi/shiny/CohortCountsShinyPackagingService.java | package org.ohdsi.webapi.shiny;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.odysseusinc.arachne.commons.api.v1.dto.CommonAnalysisType;
import org.ohdsi.circe.cohortdefinition.CohortExpression;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfo;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfoId;
import org.ohdsi.webapi.cohortdefinition.CohortGenerationInfoRepository;
import org.ohdsi.webapi.cohortdefinition.InclusionRuleReport;
import org.ohdsi.webapi.service.CDMResultsService;
import org.ohdsi.webapi.service.CohortDefinitionService;
import org.ohdsi.webapi.service.ShinyService;
import org.ohdsi.webapi.shiny.summary.DataSourceSummaryConverter;
import org.ohdsi.webapi.source.SourceRepository;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@ConditionalOnBean(ShinyService.class)
public class CohortCountsShinyPackagingService extends CommonShinyPackagingService implements ShinyPackagingService {
private static final String SHINY_COHORT_COUNTS_APP_TEMPLATE_FILE_PATH = "/shiny/shiny-cohortCounts.zip";
private static final String APP_TITLE_FORMAT = "Cohort_%s_gv%sx%s_%s";
private final CohortDefinitionService cohortDefinitionService;
private final CohortDefinitionRepository cohortDefinitionRepository;
private final CohortGenerationInfoRepository cohortGenerationInfoRepository;
@Autowired
public CohortCountsShinyPackagingService(
@Value("${shiny.atlas.url}") String atlasUrl,
@Value("${shiny.repo.link}") String repoLink,
FileWriter fileWriter,
ManifestUtils manifestUtils,
ObjectMapper objectMapper,
CohortDefinitionService cohortDefinitionService,
CohortDefinitionRepository cohortDefinitionRepository,
SourceRepository sourceRepository,
CohortGenerationInfoRepository cohortGenerationInfoRepository,
CDMResultsService cdmResultsService,
DataSourceSummaryConverter dataSourceSummaryConverter
) {
super(atlasUrl, repoLink, fileWriter, manifestUtils, objectMapper, sourceRepository, cdmResultsService, dataSourceSummaryConverter);
this.cohortDefinitionService = cohortDefinitionService;
this.cohortDefinitionRepository = cohortDefinitionRepository;
this.cohortGenerationInfoRepository = cohortGenerationInfoRepository;
}
@Override
public CommonAnalysisType getType() {
return CommonAnalysisType.COHORT;
}
@Override
public String getAppTemplateFilePath() {
return SHINY_COHORT_COUNTS_APP_TEMPLATE_FILE_PATH;
}
@Override
@Transactional
public void populateAppData(Integer generationId, String sourceKey, ShinyAppDataConsumers dataConsumers) {
CohortDefinition cohort = cohortDefinitionRepository.findOne(generationId);
ExceptionUtils.throwNotFoundExceptionIfNull(cohort, String.format("There is no cohort definition with id = %d.", generationId));
int sourceId = getSourceRepository().findBySourceKey(sourceKey).getId();
CohortGenerationInfo cohortGenerationInfo = cohortGenerationInfoRepository.findOne(new CohortGenerationInfoId(cohort.getId(), sourceId));
CohortExpression cohortExpression = cohort.getExpression();
String cohortSummaryAsMarkdown = cohortDefinitionService.convertCohortExpressionToMarkdown(cohortExpression);
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_COHORT_LINK.getValue(), String.format("%s/#/cohortdefinition/%s", atlasUrl, generationId));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_COHORT_NAME.getValue(), cohort.getName());
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_AUTHOR.getValue(), getAuthor(cohort));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ASSET_ID.getValue(), cohort.getId().toString());
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_GENERATED_DATE.getValue(), getGenerationStartTime(cohortGenerationInfo));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_RECORD_COUNT.getValue(), getRecordCount(cohortGenerationInfo));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_PERSON_COUNT.getValue(), getPersonCount(cohortGenerationInfo));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_AUTHOR_NOTES.getValue(), getDescription(cohort));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_REFERENCED_COHORTS.getValue(), cohort.getName());
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_VERSION_ID.getValue(), getGenerationId(cohortGenerationInfo.getId()));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_GENERATION_ID.getValue(), getGenerationId(cohortGenerationInfo.getId()));
dataConsumers.getTextFiles().accept("cohort_summary_markdown.txt", cohortSummaryAsMarkdown);
InclusionRuleReport byEventReport = cohortDefinitionService.getInclusionRuleReport(generationId, sourceKey, 0, null); //by event
InclusionRuleReport byPersonReport = cohortDefinitionService.getInclusionRuleReport(generationId, sourceKey, 1, null); //by person
dataConsumers.getJsonObjects().accept(sourceKey + "_by_event.json", byEventReport);
dataConsumers.getJsonObjects().accept(sourceKey + "_by_person.json", byPersonReport);
}
private String getGenerationId(CohortGenerationInfoId id) {
return id == null ? "" : Integer.toString(id.getCohortDefinitionId()).concat("x").concat(Integer.toString(id.getSourceId()));
}
private String getDescription(CohortDefinition cohort) {
if (cohort != null && cohort.getDescription() != null) {
return escapeLineBreaks(cohort.getDescription());
}
return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
}
private String getPersonCount(CohortGenerationInfo cohortGenerationInfo) {
if (cohortGenerationInfo != null && cohortGenerationInfo.getPersonCount() != null) {
return cohortGenerationInfo.getPersonCount().toString();
}
return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
}
private String getRecordCount(CohortGenerationInfo cohortGenerationInfo) {
if (cohortGenerationInfo != null && cohortGenerationInfo.getRecordCount() != null) {
return cohortGenerationInfo.getRecordCount().toString();
}
return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
}
private String getGenerationStartTime(CohortGenerationInfo cohortGenerationInfo) {
if (cohortGenerationInfo != null && cohortGenerationInfo.getStartTime() != null) {
return dateToString(cohortGenerationInfo.getStartTime());
}
return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
}
private String getAuthor(CohortDefinition cohort) {
if (cohort.getCreatedBy() != null) {
return cohort.getCreatedBy().getLogin();
}
return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
}
@Override
public ApplicationBrief getBrief(Integer generationId, String sourceKey) {
CohortDefinition cohort = cohortDefinitionRepository.findOne(generationId);
Integer assetId = cohort.getId();
Integer sourceId = sourceRepository.findBySourceKey(sourceKey).getSourceId();
ApplicationBrief brief = new ApplicationBrief();
brief.setName(String.format("%s_%s_%s", CommonAnalysisType.COHORT.getCode(), generationId, sourceKey));
brief.setTitle(prepareAppTitle(generationId, assetId, sourceId, sourceKey));
brief.setDescription(cohort.getDescription());
return brief;
}
private String prepareAppTitle(Integer generationId, Integer assetId, Integer sourceId, String sourceKey) {
return String.format(APP_TITLE_FORMAT, generationId, assetId, sourceId, sourceKey);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/ShinyController.java | src/main/java/org/ohdsi/webapi/shiny/ShinyController.java | package org.ohdsi.webapi.shiny;
import org.glassfish.jersey.media.multipart.ContentDisposition;
import org.ohdsi.webapi.service.ShinyService;
import org.ohdsi.webapi.shiro.annotations.DataSourceAccess;
import org.ohdsi.webapi.shiro.annotations.SourceKey;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.nio.file.Files;
/**
 * REST endpoints for downloading or publishing a packaged Shiny application for a
 * given analysis type, generation id and data source. Registered only when the
 * {@code shiny.enabled} property is {@code true}.
 */
@Component
@ConditionalOnProperty(name = "shiny.enabled", havingValue = "true")
@Path("/shiny")
public class ShinyController {

    @Autowired
    private ShinyService service;

    /**
     * Packages the requested app as a zip archive and streams it back as an attachment.
     *
     * @param type analysis type code (resolved by ShinyService)
     * @param id generation identifier
     * @param sourceKey key of the data source; access is checked via @DataSourceAccess
     * @throws IOException if the packaged file cannot be opened for streaming
     */
    @GET
    @Path("/download/{type}/{id}/{sourceKey}")
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_OCTET_STREAM)
    @DataSourceAccess
    public Response downloadShinyApp(
            @PathParam("type") String type,
            @PathParam("id") final int id,
            @PathParam("sourceKey") @SourceKey String sourceKey
    ) throws IOException {
        TemporaryFile data = service.packageShinyApp(type, id, sourceKey, PackagingStrategies.zip());
        ContentDisposition contentDisposition = ContentDisposition.type("attachment")
                .fileName(data.getFilename())
                .build();
        // NOTE(review): the stream is handed to JAX-RS for closing; the temp file's
        // cleanup is presumably handled by TemporaryFile — confirm its lifecycle.
        return Response
                .ok(Files.newInputStream(data.getFile()))
                .header(HttpHeaders.CONTENT_TYPE, "application/zip")
                .header(HttpHeaders.CONTENT_DISPOSITION, contentDisposition)
                .build();
    }

    /**
     * Packages and publishes the requested app; returns 200 with an empty body on success.
     */
    @GET
    @Path("/publish/{type}/{id}/{sourceKey}")
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_OCTET_STREAM)
    @DataSourceAccess
    @Transactional
    public Response publishShinyApp(
            @PathParam("type") String type,
            @PathParam("id") final int id,
            @PathParam("sourceKey") @SourceKey String sourceKey
    ) {
        service.publishApp(type, id, sourceKey);
        return Response.ok().build();
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/ApplicationBrief.java | src/main/java/org/ohdsi/webapi/shiny/ApplicationBrief.java | package org.ohdsi.webapi.shiny;
/**
 * Simple mutable holder for the identifying details of a packaged Shiny
 * application: its name, display title and description.
 */
public class ApplicationBrief {

    private String name;
    private String title;
    private String description;

    /** @return the application name (null until set) */
    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** @return the display title (null until set) */
    public String getTitle() {
        return this.title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    /** @return the free-text description (null until set) */
    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/ManifestUtils.java | src/main/java/org/ohdsi/webapi/shiny/ManifestUtils.java | package org.ohdsi.webapi.shiny;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.codec.digest.DigestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.ws.rs.InternalServerErrorException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.Consumer;
/**
 * Helpers for reading and updating a Shiny application manifest ({@code manifest.json}):
 * parsing it into a JSON tree and registering packaged files with their checksums.
 */
@Component
public class ManifestUtils {

    private static final Logger LOG = LoggerFactory.getLogger(ManifestUtils.class);

    private final ObjectMapper objectMapper = new ObjectMapper();

    /** Reads and parses the manifest file at {@code path} into a JSON tree. */
    public JsonNode parseManifest(Path path) {
        try (InputStream in = Files.newInputStream(path)) {
            return objectMapper.readTree(in);
        } catch (IOException e) {
            LOG.error("Failed to parse manifest", e);
            throw new InternalServerErrorException();
        }
    }

    /**
     * Returns a callback that registers a file (relative to {@code root}) in the
     * manifest's {@code files} section together with its checksum.
     */
    public Consumer<Path> addDataToManifest(JsonNode manifest, Path root) {
        return file -> {
            JsonNode filesSection = manifest.get("files");
            if (!filesSection.isObject()) {
                LOG.error("Wrong manifest.json, there is no files section");
                throw new InternalServerErrorException();
            }
            // Manifest entries are keyed by root-relative paths with forward slashes.
            String entryKey = root.relativize(file).toString().replace("\\", "/");
            ((ObjectNode) filesSection).putObject(entryKey).put("checksum", checksum(file));
        };
    }

    /** MD5 hex digest of the file contents. */
    private String checksum(Path path) {
        try (InputStream in = Files.newInputStream(path)) {
            return DigestUtils.md5Hex(in);
        } catch (IOException e) {
            LOG.error("Failed to calculate checksum", e);
            throw new InternalServerErrorException();
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/ShinyPublishedRepository.java | src/main/java/org/ohdsi/webapi/shiny/ShinyPublishedRepository.java | package org.ohdsi.webapi.shiny;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.Optional;
@Repository
public interface ShinyPublishedRepository extends JpaRepository<ShinyPublishedEntity, Long> {
    /**
     * Looks up a published Shiny application record by the analysis it was built
     * from and the source (CDM database) key it was generated against.
     * Spring Data derives the query from the method name.
     *
     * @param id analysis id ({@code analysis_id} column)
     * @param sourceKey source key ({@code source_key} column)
     * @return the matching record, or empty if none has been published
     */
    Optional<ShinyPublishedEntity> findByAnalysisIdAndSourceKey(Long id, String sourceKey);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/ShinyPackagingService.java | src/main/java/org/ohdsi/webapi/shiny/ShinyPackagingService.java | package org.ohdsi.webapi.shiny;
import com.odysseusinc.arachne.commons.api.v1.dto.CommonAnalysisType;
public interface ShinyPackagingService {
    /** Analysis type this implementation handles; used to dispatch to the right packager. */
    CommonAnalysisType getType();

    /**
     * Builds a deployable Shiny application bundle for the given generation/source,
     * using {@code packaging} to turn the staged directory into an archive.
     *
     * @param generationId id of the analysis generation whose results are packaged
     * @param sourceKey key of the CDM source the generation ran against
     * @param packaging strategy producing the final archive from the staged app directory
     * @return the packaged application as a named temporary file
     */
    TemporaryFile packageApp(Integer generationId, String sourceKey, PackagingStrategy packaging);

    /**
     * Returns the name/title/description metadata used when publishing the
     * application for the given generation and source.
     */
    ApplicationBrief getBrief(Integer generationId, String sourceKey);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/CohortCharacterizationAnalysisHeaderToFieldMapper.java | src/main/java/org/ohdsi/webapi/shiny/CohortCharacterizationAnalysisHeaderToFieldMapper.java | package org.ohdsi.webapi.shiny;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
@Service
public class CohortCharacterizationAnalysisHeaderToFieldMapper {
    private static final Logger LOG = LoggerFactory.getLogger(CohortCharacterizationAnalysisHeaderToFieldMapper.class);

    // header -> field mapping loaded once at startup from the bundled CSV.
    private final Map<String, String> headerFieldMapping;

    /**
     * Loads the header-to-field mapping from the classpath CSV resource.
     * Each line is "header,field"; the split uses a limit of 2 so field values
     * may themselves contain commas. Lines without a comma are logged and skipped.
     *
     * @param resource classpath CSV with one "header,field" pair per line (UTF-8)
     * @throws IOException if the resource cannot be read
     */
    public CohortCharacterizationAnalysisHeaderToFieldMapper(@Value("classpath:shiny/cc-header-field-mapping.csv") Resource resource) throws IOException {
        this.headerFieldMapping = new HashMap<>();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(resource.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                String[] parts = line.split(",", 2); // at most 2 parts: header, field
                if (parts.length >= 2) { // line must contain both header and field
                    String header = parts[0];
                    String field = parts[1];
                    headerFieldMapping.put(header, field);
                } else {
                    // Parameterized logging (was string concatenation) per SLF4J convention.
                    LOG.warn("ignoring a line due to unexpected count of parameters (!=2): {}", line);
                }
            }
        }
    }

    /**
     * Returns the header-to-field mapping.
     * NOTE(review): this exposes the internal mutable map; callers could modify it.
     * Left as-is for interface compatibility — confirm no caller relies on mutating
     * the returned map before wrapping it in Collections.unmodifiableMap.
     */
    public Map<String, String> getHeaderFieldMapping() {
        return headerFieldMapping;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/ShinyConfiguration.java | src/main/java/org/ohdsi/webapi/shiny/ShinyConfiguration.java | package org.ohdsi.webapi.shiny;
import org.ohdsi.webapi.service.ShinyService;
import org.ohdsi.webapi.shiny.posit.PositConnectProperties;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/**
 * Activates Shiny-publishing configuration.
 * Only loaded when a {@link ShinyService} bean exists (i.e. the Shiny feature is
 * enabled elsewhere in the context), and binds the Posit Connect settings via
 * {@link PositConnectProperties}.
 */
@Configuration
@ConditionalOnBean(ShinyService.class)
@EnableConfigurationProperties(PositConnectProperties.class)
public class ShinyConfiguration {
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/ShinyConstants.java | src/main/java/org/ohdsi/webapi/shiny/ShinyConstants.java | package org.ohdsi.webapi.shiny;
/**
 * String constants used when assembling Shiny application bundles:
 * property keys written to app.properties, the placeholder for missing values,
 * and the date/time pattern used for generation timestamps.
 */
public enum ShinyConstants {
    /** Placeholder emitted when a value cannot be determined. */
    VALUE_NOT_AVAILABLE("N/A"),
    /** SimpleDateFormat pattern for generation timestamps. */
    DATE_TIME_FORMAT("yyyy-MM-dd HH:mm:ss"),
    PROPERTY_NAME_REPO_LINK("repo_link"),
    PROPERTY_NAME_COHORT_LINK("cohort_link"),
    PROPERTY_NAME_COHORT_NAME("cohort_name"),
    PROPERTY_NAME_ATLAS_URL("atlas_url"),
    PROPERTY_NAME_ATLAS_LINK("atlas_link"),
    PROPERTY_NAME_DATASOURCE_KEY("datasource"),
    PROPERTY_NAME_DATASOURCE_NAME("datasource_name"),
    PROPERTY_NAME_ASSET_ID("asset_id"),
    PROPERTY_NAME_ASSET_NAME("asset_name"),
    PROPERTY_NAME_ANALYSIS_NAME("analysis_name"),
    PROPERTY_NAME_AUTHOR("author"),
    PROPERTY_NAME_AUTHOR_NOTES("author_notes"),
    PROPERTY_NAME_GENERATED_DATE("generated_date"),
    PROPERTY_NAME_RECORD_COUNT("record_count"),
    PROPERTY_NAME_REFERENCED_COHORTS("referenced_cohorts"),
    PROPERTY_NAME_VERSION_ID("version_id"),
    PROPERTY_NAME_GENERATION_ID("generation_id"),
    PROPERTY_NAME_PERSON_COUNT("person_count");

    // The literal written to app.properties (key) or used verbatim (format/placeholder).
    private final String value;

    ShinyConstants(String value) {
        this.value = value;
    }

    public String getValue() {
        return value;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/ShinyPublishedEntity.java | src/main/java/org/ohdsi/webapi/shiny/ShinyPublishedEntity.java | package org.ohdsi.webapi.shiny;
import com.odysseusinc.arachne.commons.api.v1.dto.CommonAnalysisType;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import org.ohdsi.webapi.model.CommonEntity;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import java.util.UUID;
/**
 * JPA entity recording a Shiny application published to Posit Connect:
 * which analysis/source it came from, the Connect content id, and the
 * execution it was built from.
 */
@Entity
@Table(name = "shiny_published")
public class ShinyPublishedEntity extends CommonEntity<Long> {
    @Id
    @GenericGenerator(
            name = "shiny_published_generator",
            strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
            parameters = {
                    @Parameter(name = "sequence_name", value = "shiny_published_sequence"),
                    @Parameter(name = "increment_size", value = "1")
            }
    )
    @GeneratedValue(generator = "shiny_published_generator")
    private Long id;

    // NOTE(review): no @Enumerated annotation, so JPA defaults to ORDINAL mapping for this
    // enum — fragile if CommonAnalysisType constants are reordered. Changing it to STRING
    // would break existing rows; confirm the column contents before touching the mapping.
    private CommonAnalysisType type;

    // Id of the analysis (IR, pathway, CC, ...) the published app was generated from.
    @Column(name = "analysis_id")
    private Long analysisId;

    // Key of the CDM source the generation ran against.
    @Column(name = "source_key")
    private String sourceKey;

    @Column(name = "execution_id")
    private Long executionId;

    // Posit Connect content GUID of the deployed application.
    @Column(name = "content_id")
    private UUID contentId;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public CommonAnalysisType getType() {
        return type;
    }

    public void setType(CommonAnalysisType type) {
        this.type = type;
    }

    public Long getAnalysisId() {
        return analysisId;
    }

    public void setAnalysisId(Long analysisId) {
        this.analysisId = analysisId;
    }

    public Long getExecutionId() {
        return executionId;
    }

    public void setExecutionId(Long executionId) {
        this.executionId = executionId;
    }

    public UUID getContentId() {
        return contentId;
    }

    public void setContentId(UUID contentId) {
        this.contentId = contentId;
    }

    public String getSourceKey() {
        return sourceKey;
    }

    public void setSourceKey(String sourceKey) {
        this.sourceKey = sourceKey;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/CommonShinyPackagingService.java | src/main/java/org/ohdsi/webapi/shiny/CommonShinyPackagingService.java | package org.ohdsi.webapi.shiny;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.odysseusinc.arachne.commons.api.v1.dto.CommonAnalysisType;
import com.odysseusinc.arachne.execution_engine_common.util.CommonFileUtils;
import org.ohdsi.webapi.report.CDMDashboard;
import org.ohdsi.webapi.service.CDMResultsService;
import org.ohdsi.webapi.shiny.posit.PositConnectClientException;
import org.ohdsi.webapi.shiny.summary.DataSourceSummary;
import org.ohdsi.webapi.shiny.summary.DataSourceSummaryConverter;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceRepository;
import org.ohdsi.webapi.util.TempFileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.InternalServerErrorException;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Base class for per-analysis-type Shiny packagers. Implements the common
 * packaging pipeline ({@link #packageApp}): unzip the app template, collect
 * analysis-specific data via {@link #populateAppData}, write it into the app's
 * data directory, register every written file in manifest.json, and archive the
 * result. Subclasses supply the template path, the data, and the brief.
 */
public abstract class CommonShinyPackagingService {
    private static final Logger LOG = LoggerFactory.getLogger(CommonShinyPackagingService.class);

    protected final String atlasUrl;
    protected String repoLink;
    protected final FileWriter fileWriter;
    protected final ManifestUtils manifestUtils;
    protected final ObjectMapper objectMapper;
    protected final SourceRepository sourceRepository;
    protected final CDMResultsService cdmResultsService;
    protected final DataSourceSummaryConverter dataSourceSummaryConverter;

    public CommonShinyPackagingService(String atlasUrl, String repoLink, FileWriter fileWriter, ManifestUtils manifestUtils, ObjectMapper objectMapper, SourceRepository sourceRepository, CDMResultsService cdmResultsService, DataSourceSummaryConverter dataSourceSummaryConverter) {
        this.atlasUrl = atlasUrl;
        this.repoLink = repoLink;
        this.fileWriter = fileWriter;
        this.manifestUtils = manifestUtils;
        this.objectMapper = objectMapper;
        this.sourceRepository = sourceRepository;
        this.cdmResultsService = cdmResultsService;
        this.dataSourceSummaryConverter = dataSourceSummaryConverter;
    }

    /** Analysis type this packager handles. */
    public abstract CommonAnalysisType getType();

    /** Name/title/description metadata for publishing the packaged app. */
    public abstract ApplicationBrief getBrief(Integer generationId, String sourceKey);

    /** Classpath location of the zipped Shiny app template for this analysis type. */
    public abstract String getAppTemplateFilePath();

    /**
     * Contributes analysis-specific app.properties entries, text files and JSON
     * files to the bundle via the supplied consumers.
     */
    public abstract void populateAppData(
            Integer generationId,
            String sourceKey,
            ShinyAppDataConsumers shinyAppDataConsumers
    );

    public String getAtlasUrl() {
        return atlasUrl;
    }

    public String getRepoLink() {
        return repoLink;
    }

    public void setRepoLink(String repoLink) {
        this.repoLink = repoLink;
    }

    public FileWriter getFileWriter() {
        return fileWriter;
    }

    public ManifestUtils getManifestUtils() {
        return manifestUtils;
    }

    public ObjectMapper getObjectMapper() {
        return objectMapper;
    }

    public SourceRepository getSourceRepository() {
        return sourceRepository;
    }

    public CDMResultsService getCdmResultsService() {
        return cdmResultsService;
    }

    public DataSourceSummaryConverter getDataSourceSummaryConverter() {
        return dataSourceSummaryConverter;
    }

    /**
     * Collectors for the data a concrete packager contributes to the bundle:
     * app.properties entries, raw text files, and objects serialized to JSON.
     * Declared {@code static}: it never references the enclosing service instance,
     * so the hidden outer-instance reference of a non-static inner class would
     * serve no purpose (Effective Java, Item 24).
     */
    static class ShinyAppDataConsumers {
        private final Map<String, String> applicationProperties = new HashMap<>();
        private final Map<String, Object> jsonObjectsToSave = new HashMap<>();
        private final Map<String, String> textFilesToSave = new HashMap<>();
        private final BiConsumer<String, String> appPropertiesConsumer = applicationProperties::put;
        private final BiConsumer<String, String> textFilesConsumer = textFilesToSave::put;
        private final BiConsumer<String, Object> jsonObjectsConsumer = jsonObjectsToSave::put;

        /** Accepts (property key, value) pairs written to app.properties. */
        public BiConsumer<String, String> getAppProperties() {
            return appPropertiesConsumer;
        }

        /** Accepts (file name, content) pairs written verbatim into data/. */
        public BiConsumer<String, String> getTextFiles() {
            return textFilesConsumer;
        }

        /** Accepts (file name, object) pairs serialized as JSON into data/. */
        public BiConsumer<String, Object> getJsonObjects() {
            return jsonObjectsConsumer;
        }
    }

    /**
     * Packages the Shiny application for the given generation and source.
     * Works inside a temporary directory that is cleaned up by
     * {@link TempFileUtils#doInDirectory}.
     *
     * @throws PositConnectClientException if the template has no manifest.json
     * @throws InternalServerErrorException on any I/O failure during packaging
     */
    public final TemporaryFile packageApp(Integer generationId, String sourceKey, PackagingStrategy packaging) {
        return TempFileUtils.doInDirectory(path -> {
            try {
                // Unpack the bundled app template into the working directory.
                File templateArchive = TempFileUtils.copyResourceToTempFile(getAppTemplateFilePath(), "shiny", ".zip");
                CommonFileUtils.unzipFiles(templateArchive, path.toFile());
                Path manifestPath = path.resolve("manifest.json");
                if (!Files.exists(manifestPath)) {
                    throw new PositConnectClientException("manifest.json is not found in the Shiny Application");
                }
                JsonNode manifest = getManifestUtils().parseManifest(manifestPath);
                Path dataDir = path.resolve("data");
                Files.createDirectory(dataDir);
                Source source = getSourceRepository().findBySourceKey(sourceKey);
                ShinyAppDataConsumers shinyAppDataConsumers = new ShinyAppDataConsumers();
                // Default properties common to every Shiny app.
                shinyAppDataConsumers.applicationProperties.put(ShinyConstants.PROPERTY_NAME_REPO_LINK.getValue(), getRepoLink());
                shinyAppDataConsumers.applicationProperties.put(ShinyConstants.PROPERTY_NAME_ATLAS_URL.getValue(), getAtlasUrl());
                shinyAppDataConsumers.applicationProperties.put(ShinyConstants.PROPERTY_NAME_DATASOURCE_KEY.getValue(), sourceKey);
                shinyAppDataConsumers.applicationProperties.put(ShinyConstants.PROPERTY_NAME_DATASOURCE_NAME.getValue(), source.getSourceName());
                populateCDMDataSourceSummaryIfPresent(source, shinyAppDataConsumers);
                // Analysis-specific data from the subclass.
                populateAppData(generationId, sourceKey, shinyAppDataConsumers);
                // Materialize collected data under data/ and register every file in the manifest.
                Stream<Path> textFilesPaths = shinyAppDataConsumers.textFilesToSave.entrySet()
                        .stream()
                        .map(entry -> getFileWriter().writeTextFile(dataDir.resolve(entry.getKey()), pw -> pw.print(entry.getValue())));
                Stream<Path> jsonFilesPaths = shinyAppDataConsumers.jsonObjectsToSave.entrySet()
                        .stream()
                        .map(entry -> getFileWriter().writeObjectAsJsonFile(dataDir, entry.getValue(), entry.getKey()));
                Stream<Path> appPropertiesFilePath = Stream.of(
                        getFileWriter().writeTextFile(dataDir.resolve("app.properties"), pw -> pw.print(convertAppPropertiesToString(shinyAppDataConsumers.applicationProperties)))
                );
                Stream.of(textFilesPaths, jsonFilesPaths, appPropertiesFilePath)
                        .flatMap(Function.identity())
                        .forEach(getManifestUtils().addDataToManifest(manifest, path));
                getFileWriter().writeJsonNodeToFile(manifest, manifestPath);
                Path appArchive = packaging.apply(path);
                ApplicationBrief applicationBrief = getBrief(generationId, sourceKey);
                // NOTE(review): the brief title becomes the zip file name — assumes titles
                // contain only filesystem-safe characters; verify for user-supplied names.
                return new TemporaryFile(String.format("%s.zip", applicationBrief.getTitle()), appArchive);
            } catch (IOException e) {
                LOG.error("Failed to prepare Shiny application", e);
                throw new InternalServerErrorException();
            }
        });
    }

    /**
     * Adds a datasource_summary.json built from the CDM dashboard; degrades to an
     * empty summary (best effort, warning logged) if the dashboard is unavailable.
     */
    private void populateCDMDataSourceSummaryIfPresent(Source source, ShinyAppDataConsumers shinyAppDataConsumers) {
        DataSourceSummary dataSourceSummary;
        try {
            CDMDashboard cdmDashboard = getCdmResultsService().getDashboard(source.getSourceKey());
            dataSourceSummary = getDataSourceSummaryConverter().convert(cdmDashboard);
        } catch (Exception e) {
            LOG.warn("Could not populate datasource summary", e);
            dataSourceSummary = getDataSourceSummaryConverter().emptySummary(source.getSourceName());
        }
        shinyAppDataConsumers.jsonObjectsToSave.put("datasource_summary.json", dataSourceSummary);
    }

    /** Renders the collected properties as "key=value" lines for app.properties. */
    private String convertAppPropertiesToString(Map<String, String> appProperties) {
        return appProperties.entrySet().stream()
                .map(entry -> String.format("%s=%s\n", entry.getKey(), entry.getValue()))
                .collect(Collectors.joining());
    }

    /** Formats a date with {@link ShinyConstants#DATE_TIME_FORMAT}; null-safe. */
    protected String dateToString(Date date) {
        if (date == null) return null;
        // A fresh SimpleDateFormat per call keeps this thread-safe.
        DateFormat df = new SimpleDateFormat(ShinyConstants.DATE_TIME_FORMAT.getValue());
        return df.format(date);
    }

    /** Escapes CR/LF so multi-line text fits on one app.properties line; null-safe. */
    protected String escapeLineBreaks(String input) {
        if (input == null) return null;
        return input.replace("\n", "\\n").replace("\r", "\\r");
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/ConflictPositConnectException.java | src/main/java/org/ohdsi/webapi/shiny/ConflictPositConnectException.java | package org.ohdsi.webapi.shiny;
import org.ohdsi.webapi.shiny.posit.PositConnectClientException;
/**
 * Posit Connect client error for conflict responses — presumably raised when the
 * server rejects an operation because the resource already exists (HTTP 409);
 * confirm against the client code that throws it.
 */
public class ConflictPositConnectException extends PositConnectClientException {
    public ConflictPositConnectException(String message) {
        super(message);
    }

    public ConflictPositConnectException(String message, Throwable cause) {
        // Cause is preserved for the full stack trace.
        super(message, cause);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/IncidenceRatesShinyPackagingService.java | src/main/java/org/ohdsi/webapi/shiny/IncidenceRatesShinyPackagingService.java | package org.ohdsi.webapi.shiny;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Iterables;
import com.odysseusinc.arachne.commons.api.v1.dto.CommonAnalysisType;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.QuoteMode;
import org.ohdsi.webapi.cohortdefinition.dto.CohortDTO;
import org.ohdsi.webapi.ircalc.AnalysisReport;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysis;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisExportExpression;
import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisRepository;
import org.ohdsi.webapi.service.CDMResultsService;
import org.ohdsi.webapi.service.IRAnalysisResource;
import org.ohdsi.webapi.service.ShinyService;
import org.ohdsi.webapi.service.dto.AnalysisInfoDTO;
import org.ohdsi.webapi.shiny.summary.DataSourceSummaryConverter;
import org.ohdsi.webapi.source.SourceRepository;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.InternalServerErrorException;
import java.io.IOException;
import java.io.StringWriter;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;
/**
 * Packages Incidence Rate analysis results into a Shiny application bundle.
 * Supplies the IR-specific template, app.properties metadata, a cohorts.csv of
 * target/outcome cohorts, and one JSON report per target/outcome combination.
 */
@Service
@ConditionalOnBean(ShinyService.class)
public class IncidenceRatesShinyPackagingService extends CommonShinyPackagingService implements ShinyPackagingService {
    private static final Logger LOG = LoggerFactory.getLogger(IncidenceRatesShinyPackagingService.class);
    private static final String SHINY_INCIDENCE_RATES_APP_TEMPLATE_FILE_PATH = "/shiny/shiny-incidenceRates.zip";
    private static final String COHORT_TYPE_TARGET = "target";
    private static final String COHORT_TYPE_OUTCOME = "outcome";
    // Title pattern: Incidence_<generationId>_gv<assetId>x<sourceId>_<sourceKey>
    private static final String APP_NAME_FORMAT = "Incidence_%s_gv%sx%s_%s";

    private final IncidenceRateAnalysisRepository incidenceRateAnalysisRepository;
    private final IRAnalysisResource irAnalysisResource;

    @Autowired
    public IncidenceRatesShinyPackagingService(
            @Value("${shiny.atlas.url}") String atlasUrl,
            @Value("${shiny.repo.link}") String repoLink,
            FileWriter fileWriter,
            ManifestUtils manifestUtils,
            ObjectMapper objectMapper,
            IncidenceRateAnalysisRepository incidenceRateAnalysisRepository,
            IRAnalysisResource irAnalysisResource,
            SourceRepository sourceRepository,
            CDMResultsService cdmResultsService,
            DataSourceSummaryConverter dataSourceSummaryConverter) {
        super(atlasUrl, repoLink, fileWriter, manifestUtils, objectMapper, sourceRepository, cdmResultsService, dataSourceSummaryConverter);
        this.incidenceRateAnalysisRepository = incidenceRateAnalysisRepository;
        this.irAnalysisResource = irAnalysisResource;
    }

    @Override
    public CommonAnalysisType getType() {
        return CommonAnalysisType.INCIDENCE;
    }

    @Override
    public String getAppTemplateFilePath() {
        return SHINY_INCIDENCE_RATES_APP_TEMPLATE_FILE_PATH;
    }

    /**
     * Collects IR-specific app data: metadata properties, cohorts.csv, and one
     * JSON AnalysisReport per (target, outcome) cohort pair.
     * Note: here {@code generationId} is actually the IR analysis id (it is passed
     * to findOne on the analysis repository).
     *
     * @throws javax.ws.rs.NotFoundException if no analysis exists for the id
     * @throws RuntimeException wrapping JSON parse failures of the stored expression
     */
    @Override
    @Transactional
    public void populateAppData(Integer generationId, String sourceKey, ShinyAppDataConsumers dataConsumers) {
        IncidenceRateAnalysis analysis = incidenceRateAnalysisRepository.findOne(generationId);
        ExceptionUtils.throwNotFoundExceptionIfNull(analysis, String.format("There is no incidence rate analysis with id = %d.", generationId));
        try {
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ATLAS_LINK.getValue(), String.format("%s/#/iranalysis/%s", atlasUrl, generationId));
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ANALYSIS_NAME.getValue(), analysis.getName());
            // The analysis design is stored as a JSON string; deserialize it to reach the cohort lists.
            IncidenceRateAnalysisExportExpression expression = objectMapper.readValue(analysis.getDetails().getExpression(), IncidenceRateAnalysisExportExpression.class);
            AnalysisInfoDTO analysisInfoDTO = irAnalysisResource.getAnalysisInfo(analysis.getId(), sourceKey);
            Integer assetId = analysis.getId();
            Integer sourceId = sourceRepository.findBySourceKey(sourceKey).getSourceId();
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_AUTHOR.getValue(), getAuthor(analysis));
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ASSET_ID.getValue(), analysis.getId().toString());
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_GENERATED_DATE.getValue(), getGenerationStartTime(analysis));
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_RECORD_COUNT.getValue(), getRecordCount(analysisInfoDTO));
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_PERSON_COUNT.getValue(), getPersonCount(analysisInfoDTO));
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_AUTHOR_NOTES.getValue(), getDescription(analysis));
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_REFERENCED_COHORTS.getValue(), prepareReferencedCohorts(expression));
            // version_id and generation_id intentionally carry the same "<assetId>x<sourceId>" string.
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_VERSION_ID.getValue(), getGenerationId(assetId, sourceId));
            dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_GENERATION_ID.getValue(), getGenerationId(assetId, sourceId));
            String csvWithCohortDetails = prepareCsvWithCohorts(expression);
            dataConsumers.getTextFiles().accept("cohorts.csv", csvWithCohortDetails);
            // One report file per target/outcome combination, named by source and cohort ids.
            streamAnalysisReportsForAllCohortCombinations(expression, generationId, sourceKey)
                    .forEach(analysisReport ->
                            dataConsumers.getJsonObjects().accept(
                                    String.format("%s_targetId%s_outcomeId%s.json", sourceKey, analysisReport.summary.targetId, analysisReport.summary.outcomeId),
                                    analysisReport
                            )
                    );
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }

    /** Login of the analysis creator, or N/A when unknown. */
    private String getAuthor(IncidenceRateAnalysis analysis) {
        if (analysis.getCreatedBy() != null) {
            return analysis.getCreatedBy().getLogin();
        }
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }

    /** Start time of the most recent execution, formatted, or N/A if never run. */
    private String getGenerationStartTime(IncidenceRateAnalysis analysis) {
        if (analysis != null) {
            if (CollectionUtils.isNotEmpty(analysis.getExecutionInfoList())) {
                return dateToString(Iterables.getLast(analysis.getExecutionInfoList()).getStartTime());
            }
        }
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }

    /** Description with line breaks escaped for app.properties, or N/A. */
    private String getDescription(IncidenceRateAnalysis analysis) {
        if (analysis != null && analysis.getDescription() != null) {
            return escapeLineBreaks(analysis.getDescription());
        }
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }

    // NOTE(review): person_count is taken from summary.cases while record_count is taken
    // from summary.totalPersons — verify these two aren't swapped against the IR summary
    // semantics before relying on the properties downstream.
    private String getPersonCount(AnalysisInfoDTO analysisInfo) {
        if (analysisInfo != null && CollectionUtils.isNotEmpty(analysisInfo.getSummaryList())) {
            return Long.toString(Iterables.getLast(analysisInfo.getSummaryList()).cases);
        }
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }

    /** See the note on getPersonCount regarding the cases/totalPersons mapping. */
    private String getRecordCount(AnalysisInfoDTO analysisInfo) {
        if (analysisInfo != null && CollectionUtils.isNotEmpty(analysisInfo.getSummaryList())) {
            return Long.toString(Iterables.getLast(analysisInfo.getSummaryList()).totalPersons);
        }
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }

    /** "<assetId>x<sourceId>" identifier, or "" if either part is missing. */
    private String getGenerationId(Integer assetId, Integer sourceId) {
        return assetId == null || sourceId == null ? "" : Integer.toString(assetId).concat("x").concat(Integer.toString(sourceId));
    }

    /** Semicolon-separated, de-duplicated names of all target and outcome cohorts. */
    private String prepareReferencedCohorts(IncidenceRateAnalysisExportExpression expression) {
        if (expression == null) {
            return "";
        }
        Set<String> referencedCohortNames = new HashSet<>();
        for (CohortDTO targetCohort : expression.targetCohorts) {
            referencedCohortNames.add(targetCohort.getName());
        }
        for (CohortDTO outcomeCohort : expression.outcomeCohorts) {
            referencedCohortNames.add(outcomeCohort.getName());
        }
        return String.join("; ", referencedCohortNames);
    }

    /** Streams one AnalysisReport for every (target, outcome) cohort combination. */
    private Stream<AnalysisReport> streamAnalysisReportsForAllCohortCombinations(IncidenceRateAnalysisExportExpression expression, Integer analysisId, String sourceKey) {
        List<CohortDTO> targetCohorts = expression.targetCohorts;
        List<CohortDTO> outcomeCohorts = expression.outcomeCohorts;
        return targetCohorts.stream()
                .map(CohortDTO::getId)
                .flatMap(targetCohortId -> streamAnalysisReportsForOneCohortCombination(targetCohortId, outcomeCohorts, analysisId, sourceKey));
    }

    /**
     * Fetches a report for the fixed target against each outcome; fills in a stub
     * summary (carrying the cohort ids) when the report has none, so the file
     * naming in populateAppData never NPEs.
     */
    private Stream<AnalysisReport> streamAnalysisReportsForOneCohortCombination(Integer targetCohortId, List<CohortDTO> outcomeCohorts, Integer analysisId, String sourceKey) {
        return outcomeCohorts.stream()
                .map(outcomeCohort -> {
                    AnalysisReport analysisReport = irAnalysisResource.getAnalysisReport(analysisId, sourceKey, targetCohortId, outcomeCohort.getId());
                    if (analysisReport.summary == null) {
                        analysisReport.summary = new AnalysisReport.Summary();
                        analysisReport.summary.targetId = targetCohortId;
                        analysisReport.summary.outcomeId = outcomeCohort.getId();
                    }
                    return analysisReport;
                });
    }

    /** Publishing metadata: internal name, display title and description for the app. */
    @Override
    @Transactional
    public ApplicationBrief getBrief(Integer generationId, String sourceKey) {
        IncidenceRateAnalysis analysis = incidenceRateAnalysisRepository.findOne(generationId);
        Integer assetId = analysis.getId();
        Integer sourceId = sourceRepository.findBySourceKey(sourceKey).getSourceId();
        ApplicationBrief applicationBrief = new ApplicationBrief();
        applicationBrief.setName(String.format("%s_%s_%s", CommonAnalysisType.INCIDENCE.getCode(), generationId, sourceKey));
        applicationBrief.setTitle(prepareAppTitle(generationId, assetId, sourceId, sourceKey));
        applicationBrief.setDescription(analysis.getDescription());
        return applicationBrief;
    }

    /**
     * Renders cohorts.csv (cohort_id, cohort_name, type) covering target and
     * outcome cohorts; non-numeric values are quoted.
     */
    private String prepareCsvWithCohorts(IncidenceRateAnalysisExportExpression expression) {
        final String[] HEADER = {"cohort_id", "cohort_name", "type"};
        List<CohortDTO> targetCohorts = expression.targetCohorts;
        List<CohortDTO> outcomeCohorts = expression.outcomeCohorts;
        try (StringWriter stringWriter = new StringWriter();
             CSVPrinter csvPrinter = new CSVPrinter(stringWriter,
                     CSVFormat.Builder.create()
                             .setQuoteMode(QuoteMode.NON_NUMERIC)
                             .setHeader(HEADER)
                             .build())) {
            for (CohortDTO targetCohort : targetCohorts) {
                csvPrinter.printRecord(targetCohort.getId(), targetCohort.getName(), COHORT_TYPE_TARGET);
            }
            for (CohortDTO outcomeCohort : outcomeCohorts) {
                csvPrinter.printRecord(outcomeCohort.getId(), outcomeCohort.getName(), COHORT_TYPE_OUTCOME);
            }
            return stringWriter.toString();
        } catch (IOException e) {
            LOG.error("Failed to create a CSV file with Cohort details", e);
            throw new InternalServerErrorException();
        }
    }

    /** Formats the app title per APP_NAME_FORMAT; doubles as the zip file name. */
    private String prepareAppTitle(Integer generationId, Integer assetId, Integer sourceId, String sourceKey) {
        return String.format(APP_NAME_FORMAT, generationId, assetId, sourceId, sourceKey);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/CohortPathwaysShinyPackagingService.java | src/main/java/org/ohdsi/webapi/shiny/CohortPathwaysShinyPackagingService.java | package org.ohdsi.webapi.shiny;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.odysseusinc.arachne.commons.api.v1.dto.CommonAnalysisType;
import org.ohdsi.webapi.pathway.PathwayService;
import org.ohdsi.webapi.pathway.domain.PathwayAnalysisGenerationEntity;
import org.ohdsi.webapi.pathway.dto.PathwayAnalysisDTO;
import org.ohdsi.webapi.pathway.dto.PathwayCohortDTO;
import org.ohdsi.webapi.pathway.dto.PathwayPopulationResultsDTO;
import org.ohdsi.webapi.pathway.dto.TargetCohortPathwaysDTO;
import org.ohdsi.webapi.service.CDMResultsService;
import org.ohdsi.webapi.service.ShinyService;
import org.ohdsi.webapi.shiny.summary.DataSourceSummaryConverter;
import org.ohdsi.webapi.source.SourceRepository;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.stereotype.Service;
import java.util.HashSet;
import java.util.Set;
@Service
@ConditionalOnBean(ShinyService.class)
public class CohortPathwaysShinyPackagingService extends CommonShinyPackagingService implements ShinyPackagingService {
private static final String SHINY_COHORT_PATHWAYS_APP_TEMPLATE_FILE_PATH = "/shiny/shiny-cohortPathways.zip";
private static final String APP_TITLE_FORMAT = "Pathway_%s_gv%sx_%s";
private final PathwayService pathwayService;
@Autowired
public CohortPathwaysShinyPackagingService(
@Value("${shiny.atlas.url}") String atlasUrl,
@Value("${shiny.repo.link}") String repoLink,
FileWriter fileWriter,
ManifestUtils manifestUtils,
ObjectMapper objectMapper, PathwayService pathwayService,
SourceRepository sourceRepository,
CDMResultsService cdmResultsService,
DataSourceSummaryConverter dataSourceSummaryConverter) {
super(atlasUrl, repoLink, fileWriter, manifestUtils, objectMapper, sourceRepository, cdmResultsService, dataSourceSummaryConverter);
this.pathwayService = pathwayService;
}
@Override
public CommonAnalysisType getType() {
return CommonAnalysisType.COHORT_PATHWAY;
}
@Override
public String getAppTemplateFilePath() {
return SHINY_COHORT_PATHWAYS_APP_TEMPLATE_FILE_PATH;
}
@Override
public void populateAppData(Integer generationId, String sourceKey, ShinyAppDataConsumers dataConsumers) {
String designJSON = pathwayService.findDesignByGenerationId(generationId.longValue());
PathwayPopulationResultsDTO generationResults = pathwayService.getGenerationResults(generationId.longValue());
ExceptionUtils.throwNotFoundExceptionIfNull(generationResults, String.format("There are no pathway analysis generation results with generation id = %d.", generationId));
ExceptionUtils.throwNotFoundExceptionIfNull(designJSON, String.format("There is no pathway analysis design with generation id = %d.", generationId));
dataConsumers.getTextFiles().accept("design.json", designJSON);
dataConsumers.getJsonObjects().accept("chartData.json", generationResults);
PathwayAnalysisDTO pathwayAnalysisDTO = pathwayService.getByGenerationId(generationId);
PathwayAnalysisGenerationEntity generationEntity = pathwayService.getGeneration(generationId.longValue());
int totalCount = generationResults.getPathwayGroups().stream().mapToInt(TargetCohortPathwaysDTO::getTargetCohortCount).sum();
int personCount = generationResults.getPathwayGroups().stream().mapToInt(TargetCohortPathwaysDTO::getTotalPathwaysCount).sum();
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_AUTHOR.getValue(), getAuthor(pathwayAnalysisDTO));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ASSET_NAME.getValue(), pathwayAnalysisDTO.getName());
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ASSET_ID.getValue(), pathwayAnalysisDTO.getId().toString());
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_GENERATED_DATE.getValue(), getGenerationStartTime(generationEntity));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_RECORD_COUNT.getValue(), Integer.toString(totalCount));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_PERSON_COUNT.getValue(), Integer.toString(personCount));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_AUTHOR_NOTES.getValue(), getDescription(pathwayAnalysisDTO));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_REFERENCED_COHORTS.getValue(), prepareReferencedCohorts(pathwayAnalysisDTO));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_VERSION_ID.getValue(), Integer.toString(generationId));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_GENERATION_ID.getValue(), Integer.toString(generationId));
dataConsumers.getAppProperties().accept(ShinyConstants.PROPERTY_NAME_ATLAS_LINK.getValue(), String.format("%s/#/pathways/%s", atlasUrl, pathwayAnalysisDTO.getId()));
}
/**
 * Resolves the author shown in the app properties: the login of the user who
 * created the analysis, or the shared "not available" placeholder when the
 * creator is unknown.
 */
private String getAuthor(PathwayAnalysisDTO pathwayAnalysisDTO) {
    return pathwayAnalysisDTO.getCreatedBy() == null
            ? ShinyConstants.VALUE_NOT_AVAILABLE.getValue()
            : pathwayAnalysisDTO.getCreatedBy().getLogin();
}
/**
 * Returns the analysis description with line breaks escaped for the property
 * file, falling back to the "not available" placeholder when the DTO or its
 * description is missing.
 */
private String getDescription(PathwayAnalysisDTO pathwayAnalysisDTO) {
    boolean hasDescription = pathwayAnalysisDTO != null && pathwayAnalysisDTO.getDescription() != null;
    return hasDescription
            ? escapeLineBreaks(pathwayAnalysisDTO.getDescription())
            : ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
}
/**
 * Builds a "; "-separated list of the distinct cohort names referenced by the
 * analysis (event cohorts and target cohorts combined).
 *
 * <p>Names are sorted before joining: the de-duplicating {@code HashSet} has an
 * unspecified iteration order, which previously made this property value vary
 * between runs for the same analysis.
 *
 * @param pathwayAnalysisDTO the analysis, may be {@code null}
 * @return the joined cohort names, or the "not available" placeholder
 */
private String prepareReferencedCohorts(PathwayAnalysisDTO pathwayAnalysisDTO) {
    if (pathwayAnalysisDTO == null) {
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }
    Set<String> referencedCohortNames = new HashSet<>();
    for (PathwayCohortDTO eventCohort : pathwayAnalysisDTO.getEventCohorts()) {
        referencedCohortNames.add(eventCohort.getName());
    }
    for (PathwayCohortDTO targetCohort : pathwayAnalysisDTO.getTargetCohorts()) {
        referencedCohortNames.add(targetCohort.getName());
    }
    // Sort for a deterministic property value (HashSet order is unspecified).
    String[] names = referencedCohortNames.toArray(new String[0]);
    java.util.Arrays.sort(names);
    return String.join("; ", names);
}
/**
 * Formats the generation's start time for the app properties; when no
 * generation record exists there is no start time to report, so the
 * "not available" placeholder is returned instead.
 */
private String getGenerationStartTime(PathwayAnalysisGenerationEntity generationEntity) {
    if (generationEntity == null) {
        return ShinyConstants.VALUE_NOT_AVAILABLE.getValue();
    }
    return dateToString(generationEntity.getStartTime());
}
/**
 * Summarizes the pathway analysis behind a generation for the Shiny app
 * listing: a machine name built from the analysis type, generation id and
 * source key, a formatted title, and the analysis description.
 */
@Override
public ApplicationBrief getBrief(Integer generationId, String sourceKey) {
    PathwayAnalysisDTO analysis = pathwayService.getByGenerationId(generationId);
    String appName = String.format("%s_%s_%s", CommonAnalysisType.COHORT_PATHWAY.getCode(), generationId, sourceKey);
    ApplicationBrief brief = new ApplicationBrief();
    brief.setName(appName);
    brief.setTitle(prepareAppTitle(analysis.getId(), generationId, sourceKey));
    brief.setDescription(analysis.getDescription());
    return brief;
}
/**
 * Renders the shared application title template with the study asset id,
 * generation id and source key, so all pathway apps are labeled uniformly.
 */
private String prepareAppTitle(Integer studyAssetId, Integer generationId, String sourceKey) {
    Object[] titleArgs = {studyAssetId, generationId, sourceKey};
    return String.format(APP_TITLE_FORMAT, titleArgs);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/TemporaryFile.java | src/main/java/org/ohdsi/webapi/shiny/TemporaryFile.java | package org.ohdsi.webapi.shiny;
import java.nio.file.Path;
/**
 * Immutable pairing of a logical file name with the temporary {@link Path}
 * where that file's content is staged on disk. The logical name may differ
 * from the temp path's own file name.
 */
public class TemporaryFile {
    // Name the file should be exposed under.
    private final String filename;
    // Location of the staged content on the local filesystem.
    private final Path file;

    public TemporaryFile(String name, Path location) {
        this.filename = name;
        this.file = location;
    }

    public String getFilename() {
        return filename;
    }

    public Path getFile() {
        return file;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/shiny/PackagingStrategies.java | src/main/java/org/ohdsi/webapi/shiny/PackagingStrategies.java | package org.ohdsi.webapi.shiny;
import com.odysseusinc.arachne.commons.utils.ZipUtils;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.apache.commons.io.IOUtils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Factory methods for {@link PackagingStrategy} implementations that bundle a
 * Shiny application directory into a single archive file in a fresh temp file.
 */
public class PackagingStrategies {

    private PackagingStrategies() {
        // Utility class: static factories only.
    }

    /**
     * Packages a directory as a ZIP archive.
     *
     * @return a strategy producing a temp {@code .zip} file with the directory's contents
     */
    public static PackagingStrategy zip() {
        return path -> {
            try {
                Path appArchive = Files.createTempFile("shinyapp_", ".zip");
                ZipUtils.zipDirectory(appArchive, path);
                return appArchive;
            } catch (IOException e) {
                // UncheckedIOException is a RuntimeException, so existing callers
                // catching RuntimeException still work.
                throw new UncheckedIOException("Failed to zip Shiny app directory: " + path, e);
            }
        };
    }

    /**
     * Packages a directory as a gzip-compressed TAR archive.
     *
     * @return a strategy producing a temp {@code .tar.gz} file with the directory's contents
     */
    public static PackagingStrategy targz() {
        return path -> {
            try {
                Path archive = Files.createTempFile("shinyapp_", ".tar.gz");
                try (OutputStream out = Files.newOutputStream(archive);
                     OutputStream gzout = new GzipCompressorOutputStream(out);
                     TarArchiveOutputStream arch = new TarArchiveOutputStream(gzout)) {
                    // The default tar long-file mode errors out on entry names longer
                    // than 100 chars; POSIX headers lift that limit for nested files.
                    arch.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
                    packDirectoryFiles(path, arch);
                }
                return archive;
            } catch (IOException e) {
                throw new UncheckedIOException("Failed to tar.gz Shiny app directory: " + path, e);
            }
        };
    }

    /** Recursively adds every file and directory under {@code path} to the archive. */
    private static void packDirectoryFiles(Path path, ArchiveOutputStream arch) throws IOException {
        packDirectoryFiles(path, null, arch);
    }

    /**
     * Adds the direct children of {@code path} to {@code arch}, prefixing entry
     * names with {@code parentDir} ({@code null} at the archive root), and
     * recursing into subdirectories.
     *
     * @throws IOException if listing the directory fails
     * @throws UncheckedIOException if writing an individual entry fails
     */
    private static void packDirectoryFiles(Path path, String parentDir, ArchiveOutputStream arch) throws IOException {
        try (Stream<Path> files = Files.list(path)) {
            // Files.list order is unspecified; sort so repeated packaging of the
            // same directory yields identically-ordered archives.
            files.sorted().forEach(p -> {
                try {
                    File file = p.toFile();
                    String filePath = Stream.of(parentDir, p.getFileName().toString())
                            .filter(Objects::nonNull)
                            .collect(Collectors.joining("/"));
                    ArchiveEntry entry = arch.createArchiveEntry(file, filePath);
                    arch.putArchiveEntry(entry);
                    if (file.isFile()) {
                        try (InputStream in = Files.newInputStream(p)) {
                            IOUtils.copy(in, arch);
                        }
                    }
                    arch.closeArchiveEntry();
                    if (file.isDirectory()) {
                        packDirectoryFiles(p, filePath, arch);
                    }
                } catch (IOException e) {
                    throw new UncheckedIOException("Failed to add archive entry for " + p, e);
                }
            });
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.