repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/SpontaneousReport.java | src/main/java/org/ohdsi/webapi/evidence/SpontaneousReport.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.ohdsi.webapi.evidence;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
/**
*
* @author asena5
*/
public class SpontaneousReport {
// DTO for one spontaneous-report summary row (presumably FAERS-style data --
// TODO confirm against the evidence SQL). Field names bind to the upper-case
// column aliases via @JsonProperty; do not rename fields or properties.
public SpontaneousReport() {
}
// Condition (outcome) concept id, transported as a String as delivered by the query.
@JsonProperty("CONDITION_CONCEPT_ID")
public String conditionConceptId;
@JsonProperty("CONDITION_CONCEPT_NAME")
public String conditionConceptName;
// Drug ingredient concept id, also transported as a String.
@JsonProperty("INGREDIENT_CONCEPT_ID")
public String ingredientConceptId;
@JsonProperty("INGREDIENT_CONCEPT_NAME")
public String ingredientConceptName;
// Number of spontaneous reports observed for this ingredient/condition pair.
@JsonProperty("REPORT_COUNT")
public Integer reportCount;
// Proportional reporting ratio for the pair.
@JsonProperty("PRR")
public BigDecimal prr;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/EvidenceDetails.java | src/main/java/org/ohdsi/webapi/evidence/EvidenceDetails.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.evidence;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
public class EvidenceDetails {
// DTO describing one evidence annotation (Open Annotation-style: metadata,
// target/source, one or more bodies, and a text selector). All fields are
// serialized verbatim via @JsonProperty; do not rename them.
// --- Annotation metadata ---
@JsonProperty("label")
public String label;
@JsonProperty("lastSavedOn")
public String lastSavedOn;
@JsonProperty("wasGeneratedBy")
public String wasGeneratedBy;
@JsonProperty("annotatedAt")
public String annotatedAt;
@JsonProperty("annotatedBy")
public String annotatedBy;
@JsonProperty("motivatedBy")
public String motivatedBy;
@JsonProperty("metaType")
public String metaType;
// --- Source (annotation target) ---
@JsonProperty("target")
public String target;
@JsonProperty("studyType")
public String studyType;
@JsonProperty("sourceURL")
public String sourceURL;
@JsonProperty("text")
public String text;
// --- Tagging (annotation body; multiple bodies possible) ---
@JsonProperty("bodyLabel")
public String bodyLabel;
@JsonProperty("description")
public String description;
@JsonProperty("tagType")
public String tagType;
// NOTE(review): non-conventional UpperCamel field names, but they are part of
// the JSON contract -- leave as-is.
@JsonProperty("ImedsDrug")
public String ImedsDrug;
@JsonProperty("ImedsHoi")
public String ImedsHoi;
@JsonProperty("predicateLabel")
public String predicateLabel;
// --- Selector (locates the annotated span, e.g. within an SPL section) ---
@JsonProperty("selector")
public String selector;
@JsonProperty("splSection")
public String splSection;
@JsonProperty("exact")
public String exact;
@JsonProperty("prefix")
public String prefix;
@JsonProperty("postfix")
public String postfix;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/EvidenceUniverse.java | src/main/java/org/ohdsi/webapi/evidence/EvidenceUniverse.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.evidence;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
/**
*
* @author fdefalco
*/
public class EvidenceUniverse {
// Flattened evidence-universe row linking a drug ingredient to a condition
// with one piece of evidence. snake_case field names intentionally mirror the
// JSON property names / SQL column aliases -- do not rename.
@JsonProperty("evidence_id")
public int evidence_id;
@JsonProperty("condition_concept_id")
public int condition_concept_id;
@JsonProperty("condition_concept_name")
public String condition_concept_name;
@JsonProperty("ingredient_concept_id")
public int ingredient_concept_id;
@JsonProperty("ingredient_concept_name")
public String ingredient_concept_name;
@JsonProperty("evidence_type")
public String evidence_type;
// Single-character supports flag; exact domain values not visible here -- TODO confirm.
@JsonProperty("supports")
public Character supports;
@JsonProperty("statistic_value")
public BigDecimal statistic_value;
// Delimited linkout string (format defined by the evidence source query).
@JsonProperty("evidence_linkouts")
public String evidence_linkouts;
@JsonProperty("totalNumber")
public int totalNumber;
@JsonProperty("hasEvidence")
public String hasEvidence;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/DrugEvidence.java | src/main/java/org/ohdsi/webapi/evidence/DrugEvidence.java | package org.ohdsi.webapi.evidence;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
/**
*
* @author rkboyce and m_rasteger
*/
@JsonInclude(Include.NON_NULL)
public class DrugEvidence {
// One evidence item linking a drug to a health outcome of interest (HOI).
// NON_NULL above drops unset fields from the serialized JSON payload.
@JsonProperty("evidenceSource")
public String evidenceSource;
@JsonProperty("relationshipType")
public String relationshipType;
// Statistic reported by the source (e.g. a count or ratio -- type named below).
@JsonProperty("statisticType")
public String statisticType;
@JsonProperty("statisticValue")
public BigDecimal statisticValue;
// Outcome concept id/name, transported as Strings as delivered by the query.
@JsonProperty("hoiConceptId")
public String hoiConceptId;
@JsonProperty("hoiConceptName")
public String hoiConceptName;
// External identifier plus its type (presumably PubMed id etc. -- TODO confirm).
@JsonProperty("uniqueIdentifier")
public String uniqueIdentifier;
@JsonProperty("uniqueIdentifierType")
public String uniqueIdentifierType;
@JsonProperty("count")
public Integer count;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/DrugLabel.java | src/main/java/org/ohdsi/webapi/evidence/DrugLabel.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.evidence;
import java.io.Serializable;
import java.sql.Timestamp;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
/**
*
* @author fdefalco
*/
@Entity(name = "DrugLabel")
@Table(name="drug_labels")
public class DrugLabel implements Serializable {

  /** Primary key of the drug_labels row. */
  @Id
  @Column(name="drug_label_id")
  private int drugLabelId;

  /** SPL set id identifying the label document. */
  @Column(name="setid")
  private String setid;

  /** Name used when searching for this label. */
  @Column(name="search_name")
  private String searchName;

  /** OMOP ingredient concept (name and id) this label refers to. */
  @Column(name="ingredient_concept_name")
  private String ingredientConceptName;

  @Column(name="ingredient_concept_id")
  private int ingredientConceptId;

  /** Optional cohort association; nullable, hence the boxed type. */
  @Column(name="cohort_id")
  private Integer cohortId;

  @Column(name="image_url")
  private String imageUrl;

  @Column(name="date")
  private Timestamp date;

  public int getDrugLabelId() { return drugLabelId; }

  public void setDrugLabelId(int value) { this.drugLabelId = value; }

  public String getSetid() { return setid; }

  public void setSetid(String value) { this.setid = value; }

  public String getSearchName() { return searchName; }

  public void setSearchName(String value) { this.searchName = value; }

  public String getIngredientConceptName() { return ingredientConceptName; }

  public void setIngredientConceptName(String value) { this.ingredientConceptName = value; }

  public int getIngredientConceptId() { return ingredientConceptId; }

  public void setIngredientConceptId(int value) { this.ingredientConceptId = value; }

  public Integer getCohortId() { return cohortId; }

  public void setCohortId(Integer value) { this.cohortId = value; }

  public String getImageUrl() { return imageUrl; }

  public void setImageUrl(String value) { this.imageUrl = value; }

  public Timestamp getDate() { return date; }

  public void setDate(Timestamp value) { this.date = value; }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/HoiEvidence.java | src/main/java/org/ohdsi/webapi/evidence/HoiEvidence.java | package org.ohdsi.webapi.evidence;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
/**
*
* @author rkboyce and m_rasteger
*/
@JsonInclude(Include.NON_NULL)
public class HoiEvidence {
// One evidence item linking a health outcome of interest (HOI) to a drug --
// the mirror image of DrugEvidence. NON_NULL drops unset fields from the JSON.
@JsonProperty("evidenceSource")
public String evidenceSource;
@JsonProperty("relationshipType")
public String relationshipType;
@JsonProperty("statisticType")
public String statisticType;
@JsonProperty("statisticValue")
public BigDecimal statisticValue;
// Drug concept id/name, transported as Strings as delivered by the query.
@JsonProperty("drugConceptId")
public String drugConceptId;
@JsonProperty("drugConceptName")
public String drugConceptName;
// External identifier plus its type (presumably PubMed id etc. -- TODO confirm).
@JsonProperty("uniqueIdentifier")
public String uniqueIdentifier;
@JsonProperty("uniqueIdentifierType")
public String uniqueIdentifierType;
@JsonProperty("count")
public Integer count;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/EvidenceSearch.java | src/main/java/org/ohdsi/webapi/evidence/EvidenceSearch.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.ohdsi.webapi.evidence;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
*
* @author asena5
*/
public class EvidenceSearch {
// Request payload for an evidence search; the arrays bind to upper-case JSON
// keys, each holding concept ids / evidence types as strings.
public EvidenceSearch() {
}
@JsonProperty("CONDITION_CONCEPT_LIST")
public String[] conditionConceptList;
@JsonProperty("INGREDIENT_CONCEPT_LIST")
public String[] ingredientConceptList;
@JsonProperty("EVIDENCE_TYPE_LIST")
public String[] evidenceTypeList;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/ConceptOfInterestMapping.java | src/main/java/org/ohdsi/webapi/evidence/ConceptOfInterestMapping.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.evidence;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
/**
*
* @author asena5
*/
@Entity(name = "ConceptOfInterestMapping")
@Table(name="concept_of_interest")
public class ConceptOfInterestMapping implements Serializable {

  /** Surrogate primary key of the concept_of_interest row. */
  @Id
  @Column(name = "id")
  private Integer id;

  /** OMOP concept being mapped. */
  @Column(name = "concept_id")
  private Integer conceptId;

  /** Concept-of-interest the concept above maps to. */
  @Column(name = "concept_of_interest_id")
  private Integer conceptOfInterestId;

  public Integer getId() { return id; }

  public void setId(Integer value) { this.id = value; }

  public Integer getConceptId() { return conceptId; }

  public void setConceptId(Integer value) { this.conceptId = value; }

  public Integer getConceptOfInterestId() { return conceptOfInterestId; }

  public void setConceptOfInterestId(Integer value) { this.conceptOfInterestId = value; }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/DrugHoiEvidence.java | src/main/java/org/ohdsi/webapi/evidence/DrugHoiEvidence.java | package org.ohdsi.webapi.evidence;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
/**
*
* @author rkboyce and m_rasteger
*/
@JsonInclude(Include.NON_NULL)
public class DrugHoiEvidence {
// One evidence item for a specific drug/HOI pair (both sides identified).
// NON_NULL drops unset fields from the serialized JSON payload.
@JsonProperty("evidenceSource")
public String evidenceSource;
@JsonProperty("relationshipType")
public String relationshipType;
// Mapping type field present only in this pairwise variant of the DTO family.
@JsonProperty("mappingType")
public String mappingType;
@JsonProperty("statisticType")
public String statisticType;
@JsonProperty("statisticValue")
public BigDecimal statisticValue;
// Concept ids/names transported as Strings as delivered by the query.
@JsonProperty("drugConceptId")
public String drugConceptId;
@JsonProperty("drugConceptName")
public String drugConceptName;
@JsonProperty("hoiConceptId")
public String hoiConceptId;
@JsonProperty("hoiConceptName")
public String hoiConceptName;
// External identifier plus its type (presumably PubMed id etc. -- TODO confirm).
@JsonProperty("uniqueIdentifier")
public String uniqueIdentifier;
@JsonProperty("uniqueIdentifierType")
public String uniqueIdentifierType;
@JsonProperty("count")
public Integer count;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/ConceptOfInterestMappingRepository.java | src/main/java/org/ohdsi/webapi/evidence/ConceptOfInterestMappingRepository.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.evidence;
import java.util.Collection;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
/**
*
* @author asena5
*/
public interface ConceptOfInterestMappingRepository extends CrudRepository<ConceptOfInterestMapping, Integer> {
// Spring Data derived query: all concept-of-interest mappings for one OMOP
// concept id; returns an empty collection when none exist.
Collection<ConceptOfInterestMapping> findAllByConceptId(Integer conceptId);
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/CohortStudyMappingRepository.java | src/main/java/org/ohdsi/webapi/evidence/CohortStudyMappingRepository.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.evidence;
import java.util.Collection;
import org.springframework.data.repository.CrudRepository;
/**
*
* @author fdefalco
*/
public interface CohortStudyMappingRepository extends CrudRepository<CohortStudyMapping, Integer> {
// Spring Data derived query: all study mappings attached to one cohort
// definition; returns an empty collection when none exist.
Collection<CohortStudyMapping> findByCohortDefinitionId(Integer cohortId);
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/ConceptCohortMappingRepository.java | src/main/java/org/ohdsi/webapi/evidence/ConceptCohortMappingRepository.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.evidence;
import java.util.Collection;
import org.springframework.data.repository.CrudRepository;
/**
*
* @author fdefalco
*/
public interface ConceptCohortMappingRepository extends CrudRepository<ConceptCohortMapping, Integer> {
// Spring Data derived query: all cohort mappings for one OMOP concept id;
// returns an empty collection when none exist.
Collection<ConceptCohortMapping> findByConceptId(Integer conceptId);
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/DrugRollUpEvidence.java | src/main/java/org/ohdsi/webapi/evidence/DrugRollUpEvidence.java | package org.ohdsi.webapi.evidence;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
/**
*
* @author rkboyce and ericaVoss
*/
@JsonInclude(Include.NON_NULL)
public class DrugRollUpEvidence {
// Rolled-up evidence counts for an ingredient / clinical-drug / HOI triple,
// one column group per evidence source. Property names match the report's
// upper-case column aliases; NON_NULL drops unset counts from the JSON.
@JsonProperty("REPORT_NAME")
public String reportName;
// --- Drug and outcome identity ---
@JsonProperty("INGREDIENT_ID")
public Integer ingredientId;
@JsonProperty("INGREDIENT")
public String ingredientName;
@JsonProperty("CLINICAL_DRUG_ID")
public Integer clinicalDrugId;
@JsonProperty("CLINICAL_DRUG")
public String clinicalDrugName;
@JsonProperty("HOI_ID")
public Integer hoiId;
@JsonProperty("HOI")
public String hoiName;
// --- MEDLINE (MeSH-indexed) literature counts ---
@JsonProperty("MEDLINE_CT_COUNT")
public Integer pubmedMeshCTcount;
@JsonProperty("MEDLINE_CASE_COUNT")
public Integer pubmedMeshCaseReportcount;
@JsonProperty("MEDLINE_OTHER_COUNT")
public Integer pubmedMeshOthercount;
// --- Other structured sources ---
@JsonProperty("CTD_CHEMICAL_DISEASE_COUNT")
public Integer ctdChemicalDiseaseCount;
@JsonProperty("SPLICER_COUNT")
public Integer splicerCount;
@JsonProperty("EU_SPC_COUNT")
public Integer euSPCcount;
// --- SemMedDB counts (positive, then negated assertions) ---
@JsonProperty("SEMMEDDB_CT_COUNT")
public Integer semmedCTcount;
@JsonProperty("SEMMEDDB_CASE_COUNT")
public Integer semmedCaseReportcount;
@JsonProperty("SEMMEDDB_OTHER_COUNT")
public Integer semmedOthercount;
@JsonProperty("SEMMEDDB_NEG_CT_COUNT")
public Integer semmedNegCTcount;
@JsonProperty("SEMMEDDB_NEG_CASE_COUNT")
public Integer semmedNegCaseReportcount;
@JsonProperty("SEMMEDDB_NEG_OTHER_COUNT")
public Integer semmedNegOthercount;
// --- Spontaneous reports ---
@JsonProperty("AERS_REPORT_COUNT")
public Integer aersReportCount;
// Proportional reporting ratio for the pair.
@JsonProperty("PRR")
public BigDecimal prr;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/ConceptCohortMapping.java | src/main/java/org/ohdsi/webapi/evidence/ConceptCohortMapping.java | /*
* Copyright 2015 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.evidence;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
/**
*
* @author fdefalco
*/
@Entity(name = "ConceptCohortMapping")
@Table(name="cohort_concept_map")
public class ConceptCohortMapping implements Serializable {

  /** Cohort definition id; also serves as the entity's primary key. */
  @Id
  @Column(name = "cohort_definition_id")
  private Integer cohortDefinitionId;

  /** Display name of the cohort definition. */
  @Column(name = "cohort_definition_name")
  private String cohortDefinitionName;

  /** OMOP concept mapped to the cohort. */
  @Column(name = "concept_id")
  private Integer conceptId;

  public Integer getCohortDefinitionId() { return cohortDefinitionId; }

  public void setCohortDefinitionId(Integer value) { this.cohortDefinitionId = value; }

  public String getCohortDefinitionName() { return cohortDefinitionName; }

  public void setCohortDefinitionName(String value) { this.cohortDefinitionName = value; }

  public Integer getConceptId() { return conceptId; }

  public void setConceptId(Integer value) { this.conceptId = value; }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/CohortStudyMapping.java | src/main/java/org/ohdsi/webapi/evidence/CohortStudyMapping.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.ohdsi.webapi.evidence;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
/**
*
* @author asena5
*/
@Entity(name = "CohortStudyMapping")
@Table(name="cohort_study")
public class CohortStudyMapping {

  /** Primary key of the cohort_study row. */
  @Id
  @Column(name = "cohort_study_id")
  private Integer cohortStudyId;

  /** Cohort definition this study record is attached to. */
  @Column(name = "cohort_definition_id")
  private Integer cohortDefinitionId;

  /** Numeric study-type code (semantics defined by the cohort_study table). */
  @Column(name = "study_type")
  private Integer studyType;

  @Column(name = "study_name")
  private String studyName;

  @Column(name = "study_URL")
  private String studyUrl;

  public Integer getCohortStudyId() {
    return cohortStudyId;
  }

  public void setCohortStudyId(Integer cohortStudyId) {
    // BUG FIX: previously assigned cohortDefinitionId here (copy/paste error),
    // silently corrupting the primary key on every set.
    this.cohortStudyId = cohortStudyId;
  }

  public Integer getCohortDefinitionId() {
    return cohortDefinitionId;
  }

  public void setCohortDefinitionId(Integer cohortDefinitionId) {
    this.cohortDefinitionId = cohortDefinitionId;
  }

  public Integer getStudyType() {
    return studyType;
  }

  public void setStudyType(Integer studyType) {
    this.studyType = studyType;
  }

  public String getStudyName() {
    return studyName;
  }

  public void setStudyName(String studyName) {
    this.studyName = studyName;
  }

  public String getStudyUrl() {
    return studyUrl;
  }

  public void setStudyUrl(String studyUrl) {
    this.studyUrl = studyUrl;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/EvidenceInfo.java | src/main/java/org/ohdsi/webapi/evidence/EvidenceInfo.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.ohdsi.webapi.evidence;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
*
* @author rkboyce
*/
public class EvidenceInfo {
// Dublin Core-style metadata about an evidence dataset (title, provenance,
// coverage period, version). All fields serialize verbatim via @JsonProperty.
@JsonProperty("title")
public String title;
@JsonProperty("description")
public String description;
@JsonProperty("provenance")
public String provenance;
@JsonProperty("contributor")
public String contributor;
@JsonProperty("contactName")
public String contactName;
// NOTE(review): legacy java.util.Date; java.time would be preferable, but the
// type is part of this DTO's public/JSON contract, so it is left unchanged.
@JsonProperty("creationDate")
public Date creationDate;
@JsonProperty("coverageStartDate")
public Date coverageStartDate;
@JsonProperty("coverageEndDate")
public Date coverageEndDate;
@JsonProperty("versionIdentifier")
public String versionIdentifier;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/DrugLabelRepository.java | src/main/java/org/ohdsi/webapi/evidence/DrugLabelRepository.java | package org.ohdsi.webapi.evidence;
import java.util.Collection;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.Param;
/**
*
* @author fdefalco
*/
public interface DrugLabelRepository extends CrudRepository<DrugLabel, Integer> {
// Spring Data derived finders over drug_labels.
Collection<DrugLabel> findAllBySetid(String setid);
Collection<DrugLabel> findAllBySearchName(String searchName);
Collection<DrugLabel> findAllByIngredientConceptId(int ingredientConceptId);
// Substring match on search_name (case sensitivity depends on the database
// collation); Spring Data expands :searchTerm inside the LIKE pattern.
@Query("SELECT d FROM DrugLabel d WHERE d.searchName LIKE %:searchTerm%")
Collection<DrugLabel> searchNameContainsTerm(@Param("searchTerm") String searchTerm);
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/negativecontrols/NegativeControlDTO.java | src/main/java/org/ohdsi/webapi/evidence/negativecontrols/NegativeControlDTO.java | package org.ohdsi.webapi.evidence.negativecontrols;
import java.io.Serializable;
/**
*
* @author asena5
*/
public class NegativeControlDTO implements Serializable {

  // Explicit serialVersionUID so the serialized form stays stable across
  // recompiles (a Serializable class without one is a standard lint finding).
  private static final long serialVersionUID = 1L;

  // --- Identity of the candidate negative control ---
  public int conceptSetId;
  public int sourceId;
  public String conceptSetName;
  /** 1 when the concept is accepted as a negative control, 0 otherwise -- TODO confirm encoding. */
  public int negativeControl;
  public int conceptId;
  public String conceptName;
  public String domainId;
  public Long sortOrder;

  // --- PubMed (PMID) evidence counts at different hierarchy levels ---
  public Long descendantPmidCount;
  public Long exactPmidCount;
  public Long parentPmidCount;
  public Long ancestorPmidCount;

  // --- Exclusion flags (int-encoded booleans, as delivered by the SQL) ---
  public int indCi;
  public int tooBroad;
  public int drugInduced;
  public int pregnancy;

  // --- SPLICER (product label) evidence counts ---
  public Long descendantSplicerCount;
  public Long exactSplicerCount;
  public Long parentSplicerCount;
  public Long ancestorSplicerCount;

  // --- FAERS (spontaneous report) evidence counts ---
  public Long descendantFaersCount;
  public Long exactFaersCount;
  public Long parentFaersCount;
  public Long ancestorFaersCount;

  // --- User/optimization dispositions ---
  public int userExcluded;
  public int userIncluded;
  public int optimizedOut;
  public int notPrevalent;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/negativecontrols/NegativeControlTaskParameters.java | src/main/java/org/ohdsi/webapi/evidence/negativecontrols/NegativeControlTaskParameters.java | package org.ohdsi.webapi.evidence.negativecontrols;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.ohdsi.webapi.source.Source;
import org.springframework.jdbc.core.JdbcTemplate;
/**
 * Parameter holder for a negative-control generation task: which concept set
 * to evaluate, optional include/exclude concept sets (as pre-rendered SQL),
 * and the execution context (source, JDBC template, schemas, dialect).
 */
public class NegativeControlTaskParameters {

  // --- Job identity and input selection ---
  private String jobName;
  private Source source;
  private int conceptSetId;
  private String conceptSetName;
  private String sourceKey;
  private String[] conceptsOfInterest;

  // Optional include/exclude concept sets; 0 / "" means "not set".
  private int csToInclude = 0;
  private String csToIncludeSQL = "";
  private int csToExclude = 0;
  private String csToExcludeSQL = "";

  private String conceptDomainId;
  private String outcomeOfInterest;

  // --- Execution context ---
  private JdbcTemplate jdbcTemplate;
  private String sourceDialect;
  private String ohdsiSchema;
  private String translatedSchema;

  public String getSourceKey() {
    return sourceKey;
  }

  public void setSourceKey(String sourceKey) {
    this.sourceKey = sourceKey;
  }

  public Source getSource() {
    return source;
  }

  public void setSource(Source source) {
    this.source = source;
  }

  /** @return the jobName */
  public String getJobName() {
    return jobName;
  }

  /** @param jobName the jobName to set */
  public void setJobName(String jobName) {
    this.jobName = jobName;
  }

  /** @return the conceptsOfInterest */
  public String[] getConceptsOfInterest() {
    return conceptsOfInterest;
  }

  /** @param conceptsOfInterest the conceptsOfInterest to set */
  public void setConceptsOfInterest(String[] conceptsOfInterest) {
    this.conceptsOfInterest = conceptsOfInterest;
  }

  /**
   * Best-effort JSON rendering of this parameter object, mainly for logging.
   * Falls back to {@code Object.toString()} when serialization fails (this
   * object holds collaborators such as {@link JdbcTemplate} that Jackson may
   * not be able to serialize).
   */
  @Override
  public String toString() {
    try {
      ObjectMapper mapper = new ObjectMapper();
      return mapper.writeValueAsString(this);
    } catch (Exception ignored) {
      // Intentionally swallowed: serialization here is purely cosmetic and
      // must never break the caller; fall through to the default form.
    }
    return super.toString();
  }

  /** @return the conceptSetId */
  public int getConceptSetId() {
    return conceptSetId;
  }

  /** @return the conceptSetName */
  public String getConceptSetName() {
    return conceptSetName;
  }

  /** @param conceptSetId the conceptSetId to set */
  public void setConceptSetId(int conceptSetId) {
    this.conceptSetId = conceptSetId;
  }

  /** @param conceptSetName the conceptSetName to set */
  public void setConceptSetName(String conceptSetName) {
    this.conceptSetName = conceptSetName;
  }

  /** @return the conceptDomainId */
  public String getConceptDomainId() {
    return conceptDomainId;
  }

  /** @return the outcomeOfInterest */
  public String getOutcomeOfInterest() {
    return outcomeOfInterest;
  }

  /** @param conceptDomainId the conceptDomainId to set */
  public void setConceptDomainId(String conceptDomainId) {
    this.conceptDomainId = conceptDomainId;
  }

  /** @param outcomeOfInterest the outcomeOfInterest to set */
  public void setOutcomeOfInterest(String outcomeOfInterest) {
    this.outcomeOfInterest = outcomeOfInterest;
  }

  /** @return the jdbcTemplate */
  public JdbcTemplate getJdbcTemplate() {
    return jdbcTemplate;
  }

  /** @param jdbcTemplate the jdbcTemplate to set */
  public void setJdbcTemplate(JdbcTemplate jdbcTemplate) {
    this.jdbcTemplate = jdbcTemplate;
  }

  /** @return the sourceDialect */
  public String getSourceDialect() {
    return sourceDialect;
  }

  /** @param sourceDialect the sourceDialect to set */
  public void setSourceDialect(String sourceDialect) {
    this.sourceDialect = sourceDialect;
  }

  /** @return the ohdsiSchema */
  public String getOhdsiSchema() {
    return ohdsiSchema;
  }

  /** @param ohdsiSchema the ohdsiSchema to set */
  public void setOhdsiSchema(String ohdsiSchema) {
    this.ohdsiSchema = ohdsiSchema;
  }

  /** @return the csToInclude */
  public int getCsToInclude() {
    return csToInclude;
  }

  /** @param csToInclude the csToInclude to set */
  public void setCsToInclude(int csToInclude) {
    this.csToInclude = csToInclude;
  }

  /** @return the csToExclude */
  public int getCsToExclude() {
    return csToExclude;
  }

  /** @param csToExclude the csToExclude to set (renamed from conceptsToExclude for consistency) */
  public void setCsToExclude(int csToExclude) {
    this.csToExclude = csToExclude;
  }

  /** @return the csToIncludeSQL */
  public String getCsToIncludeSQL() {
    return csToIncludeSQL;
  }

  /** @param csToIncludeSQL the csToIncludeSQL to set */
  public void setCsToIncludeSQL(String csToIncludeSQL) {
    this.csToIncludeSQL = csToIncludeSQL;
  }

  /** @return the csToExcludeSQL */
  public String getCsToExcludeSQL() {
    return csToExcludeSQL;
  }

  /** @param csToExcludeSQL the csToExcludeSQL to set */
  public void setCsToExcludeSQL(String csToExcludeSQL) {
    this.csToExcludeSQL = csToExcludeSQL;
  }

  /** @return the translatedSchema */
  public String getTranslatedSchema() {
    return translatedSchema;
  }

  /** @param translatedSchema the translatedSchema to set */
  public void setTranslatedSchema(String translatedSchema) {
    this.translatedSchema = translatedSchema;
  }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/negativecontrols/NegativeControlMapper.java | src/main/java/org/ohdsi/webapi/evidence/negativecontrols/NegativeControlMapper.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.ohdsi.webapi.evidence.negativecontrols;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.springframework.jdbc.core.RowMapper;
/**
 * Maps one row of the negative-control evidence query result into a
 * {@link NegativeControlDTO}. Every column is copied verbatim; the column
 * reads are kept in their original order because some JDBC drivers only
 * support forward column access.
 *
 * @author asena5
 */
public class NegativeControlMapper implements RowMapper<NegativeControlDTO> {
    @Override
    public NegativeControlDTO mapRow(ResultSet rs, int rowNum) throws SQLException {
        NegativeControlDTO record = new NegativeControlDTO();
        // Concept identification within the concept set.
        record.conceptSetId = rs.getInt("CONCEPT_SET_ID");
        record.negativeControl = rs.getInt("NEGATIVE_CONTROL");
        record.conceptId = rs.getInt("CONCEPT_ID");
        record.conceptName = rs.getString("CONCEPT_NAME");
        record.sortOrder = rs.getLong("SORT_ORDER");
        // PMID-based evidence counts at different hierarchy levels.
        record.descendantPmidCount = rs.getLong("DESCENDANT_PMID_CNT");
        record.exactPmidCount = rs.getLong("EXACT_PMID_CNT");
        record.parentPmidCount = rs.getLong("PARENT_PMID_CNT");
        record.ancestorPmidCount = rs.getLong("ANCESTOR_PMID_CNT");
        // Flags used by the negative-control selection logic.
        record.indCi = rs.getInt("IND_CI");
        record.tooBroad = rs.getInt("TOO_BROAD");
        record.drugInduced = rs.getInt("DRUG_INDUCED");
        record.pregnancy = rs.getInt("PREGNANCY");
        // SPLICER-based evidence counts at different hierarchy levels.
        record.descendantSplicerCount = rs.getLong("DESCENDANT_SPLICER_CNT");
        record.exactSplicerCount = rs.getLong("EXACT_SPLICER_CNT");
        record.parentSplicerCount = rs.getLong("PARENT_SPLICER_CNT");
        record.ancestorSplicerCount = rs.getLong("ANCESTOR_SPLICER_CNT");
        // FAERS-based evidence counts at different hierarchy levels.
        record.descendantFaersCount = rs.getLong("DESCENDANT_FAERS_CNT");
        record.exactFaersCount = rs.getLong("EXACT_FAERS_CNT");
        record.parentFaersCount = rs.getLong("PARENT_FAERS_CNT");
        record.ancestorFaersCount = rs.getLong("ANCESTOR_FAERS_CNT");
        // User overrides and optimization flags.
        record.userExcluded = rs.getInt("USER_EXCLUDED");
        record.userIncluded = rs.getInt("USER_INCLUDED");
        record.optimizedOut = rs.getInt("OPTIMIZED_OUT");
        record.notPrevalent = rs.getInt("NOT_PREVALENT");
        return record;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/evidence/negativecontrols/NegativeControlTasklet.java | src/main/java/org/ohdsi/webapi/evidence/negativecontrols/NegativeControlTasklet.java | package org.ohdsi.webapi.evidence.negativecontrols;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.conceptset.ConceptSetGenerationInfo;
import org.ohdsi.webapi.conceptset.ConceptSetGenerationInfoRepository;
import org.ohdsi.webapi.conceptset.ConceptSetGenerationType;
import org.ohdsi.webapi.service.EvidenceService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionException;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
/**
 * Spring Batch tasklet that runs the negative-control generation SQL for a
 * concept set against the evidence source, tracking the run's status and
 * duration in a {@link ConceptSetGenerationInfo} record.
 */
public class NegativeControlTasklet implements Tasklet {

    private static final Logger log = LoggerFactory.getLogger(NegativeControlTasklet.class);

    private final NegativeControlTaskParameters task;
    private final JdbcTemplate evidenceJdbcTemplate;
    private final JdbcTemplate ohdsiJdbcTemplate;
    private final TransactionTemplate transactionTemplate;
    private final ConceptSetGenerationInfoRepository conceptSetGenerationInfoRepository;
    //private final CohortResultsAnalysisRunner analysisRunner;

    /**
     * @param task                  parameters for the negative-control SQL generation
     * @param evidenceJdbcTemplate  template for the evidence database (where the SQL runs)
     * @param ohdsiJdbcTemplate     template for the OHDSI database
     * @param transactionTemplate   transaction template used for status bookkeeping
     * @param repository            repository persisting generation info records
     * @param sourceDialect         currently unused; kept for signature compatibility
     */
    public NegativeControlTasklet(NegativeControlTaskParameters task,
                                  final JdbcTemplate evidenceJdbcTemplate,
                                  final JdbcTemplate ohdsiJdbcTemplate,
                                  final TransactionTemplate transactionTemplate,
                                  final ConceptSetGenerationInfoRepository repository,
                                  String sourceDialect) {
        this.task = task;
        this.evidenceJdbcTemplate = evidenceJdbcTemplate;
        this.ohdsiJdbcTemplate = ohdsiJdbcTemplate;
        this.transactionTemplate = transactionTemplate;
        this.conceptSetGenerationInfoRepository = repository;
        //this.analysisRunner = new CohortResultsAnalysisRunner(sourceDialect, visualizationDataRepository);
    }

    /**
     * Finds the generation info entry for the given source id, or {@code null}
     * if no entry exists yet.
     */
    private ConceptSetGenerationInfo findBySourceId(Collection<ConceptSetGenerationInfo> infoList, Integer sourceId) {
        for (ConceptSetGenerationInfo info : infoList) {
            if (info.getSourceId().equals(sourceId)) {
                return info;
            }
        }
        return null;
    }

    @Override
    public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) throws Exception {
        Date startTime = Calendar.getInstance().getTime();
        Map<String, Object> jobParams = chunkContext.getStepContext().getJobParameters();
        Integer conceptSetId = Integer.valueOf(jobParams.get("concept_set_id").toString());
        final Integer sourceId = Integer.valueOf(jobParams.get("source_id").toString());
        boolean isValid = false;

        // Mark the generation as RUNNING in its own committed transaction so the
        // status is visible while the (potentially long) main work executes.
        DefaultTransactionDefinition initTx = new DefaultTransactionDefinition();
        initTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
        TransactionStatus initStatus = this.transactionTemplate.getTransactionManager().getTransaction(initTx);
        ConceptSetGenerationInfo info = findBySourceId(this.conceptSetGenerationInfoRepository.findAllByConceptSetId(conceptSetId), sourceId);
        if (info == null) {
            info = new ConceptSetGenerationInfo();
            info.setConceptSetId(conceptSetId);
            info.setSourceId(sourceId);
            info.setGenerationType(ConceptSetGenerationType.NEGATIVE_CONTROLS);
        }
        info.setParams(jobParams.get("params").toString());
        info.setIsValid(isValid);
        info.setStartTime(startTime);
        info.setStatus(GenerationStatus.RUNNING);
        this.conceptSetGenerationInfoRepository.save(info);
        this.transactionTemplate.getTransactionManager().commit(initStatus);

        try {
            this.transactionTemplate.execute(status -> {
                log.debug("Entering tasklet");
                String negativeControlSql = EvidenceService.getNegativeControlSql(task);
                log.debug("Processing negative controls with: {}", negativeControlSql);
                evidenceJdbcTemplate.execute(negativeControlSql);
                return null;
            });
            isValid = true;
        } catch (final TransactionException e) {
            log.error(e.getMessage(), e);
            throw e;//FAIL job status
        } finally {
            // Record final status and duration in a fresh transaction regardless
            // of whether the main transaction succeeded.
            DefaultTransactionDefinition completeTx = new DefaultTransactionDefinition();
            completeTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
            TransactionStatus completeStatus = this.transactionTemplate.getTransactionManager().getTransaction(completeTx);
            ConceptSetGenerationInfo latest = findBySourceId(this.conceptSetGenerationInfoRepository.findAllByConceptSetId(conceptSetId), sourceId);
            if (latest != null) {
                // Prefer the freshly loaded entity; keep the earlier reference as a
                // fallback so a missing row cannot NPE inside this finally block.
                info = latest;
            }
            Date endTime = Calendar.getInstance().getTime();
            info.setExecutionDuration((int) (endTime.getTime() - startTime.getTime()));
            info.setIsValid(isValid);
            GenerationStatus status = isValid ? GenerationStatus.COMPLETE : GenerationStatus.ERROR;
            info.setStatus(status);
            this.conceptSetGenerationInfoRepository.save(info);
            this.transactionTemplate.getTransactionManager().commit(completeStatus);
        }
        return RepeatStatus.FINISHED;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/cdmresults/CDMResultsCacheTasklet.java | src/main/java/org/ohdsi/webapi/cdmresults/CDMResultsCacheTasklet.java | /*
* Copyright 2017 fdefalco.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.cdmresults;
import org.ohdsi.webapi.cdmresults.service.CDMCacheService;
import org.ohdsi.webapi.source.Source;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
/**
* @author fdefalco
*/
public class CDMResultsCacheTasklet implements Tasklet {

    /** Source whose CDM results cache will be warmed. */
    private final Source source;
    /** Service that performs the actual warm-up work. */
    private final CDMCacheService cdmCacheService;

    /**
     * @param source          the CDM source to warm
     * @param cdmCacheService service delegated to for cache population
     */
    public CDMResultsCacheTasklet(Source source, CDMCacheService cdmCacheService) {
        this.source = source;
        this.cdmCacheService = cdmCacheService;
    }

    /**
     * Warms the cache for the configured source, then signals completion.
     */
    @Override
    public RepeatStatus execute(final StepContribution contribution, final ChunkContext chunkContext) {
        this.cdmCacheService.warm(this.source);
        return RepeatStatus.FINISHED;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/cdmresults/AchillesCacheTasklet.java | src/main/java/org/ohdsi/webapi/cdmresults/AchillesCacheTasklet.java | package org.ohdsi.webapi.cdmresults;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.ohdsi.webapi.achilles.service.AchillesCacheService;
import org.ohdsi.webapi.report.CDMResultsAnalysisRunner;
import org.ohdsi.webapi.service.CDMResultsService;
import org.ohdsi.webapi.source.Source;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.jdbc.core.JdbcTemplate;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
/**
 * Spring Batch tasklet that pre-computes and caches Achilles reports for one
 * source: the simple domain reports, the per-domain treemaps, and the
 * per-concept drilldown reports derived from them.
 */
public class AchillesCacheTasklet implements Tasklet {
    private static final Logger LOG = LoggerFactory.getLogger(AchillesCacheTasklet.class);
    public static final String DASHBOARD = "dashboard";
    public static final String PERSON = "person";
    public static final String DATA_DENSITY = "datadensity";
    public static final String DEATH = "death";
    public static final String OBSERVATION_PERIOD = "observationperiod";
    public static final String TREEMAP = "treemap";
    public static final String DRILLDOWN = "drilldown";
    public static final Map<String, BiFunction<CDMResultsService, String, Object>> simpleDomains = new HashMap<>();
    private final static String DEFAULT_CONCEPT_ID_NAME = "conceptId";
    // domain with list of scripts for getting domain data and name of the concept_id column for each report
    // concept_id column name must be in camel case
    private static final Map<String, List<Pair<String, String>>> treemapDomains = new HashMap<>();
    private final Source source;
    private final CDMResultsService service;
    private final AchillesCacheService cacheService;
    private final CDMResultsAnalysisRunner analysisRunner;
    private final ObjectMapper objectMapper;
    static {
        // domains with concrete call endpoints
        simpleDomains.put(DASHBOARD, CDMResultsService::getRawDashboard);
        simpleDomains.put(PERSON, CDMResultsService::getRawPerson);
        simpleDomains.put(DATA_DENSITY, CDMResultsService::getRawDataDesity);
        simpleDomains.put(DEATH, CDMResultsService::getRawDeath);
        simpleDomains.put(OBSERVATION_PERIOD, CDMResultsService::getRawObservationPeriod);
        // domains with common call endpoint
        // each entry contains domain and mapping of script and concept id column name in script
        treemapDomains.put("condition", new ArrayList<Pair<String, String>>() {{
            addAll(getDefaultColumnNames("ageAtFirstOccurrence",
                    "prevalenceByGenderAgeYear", "prevalenceByMonth"));
            addAll(getColumnNames("conditionConceptId", "byType"));
        }});
        treemapDomains.put("conditionera", new ArrayList<Pair<String, String>>() {{
            addAll(getDefaultColumnNames("ageAtFirstOccurrence", "lengthOfEra",
                    "prevalenceByGenderAgeYear", "prevalenceByMonth"));
        }});
        treemapDomains.put("drug", new ArrayList<Pair<String, String>>() {{
            addAll(getDefaultColumnNames("ageAtFirstOccurrence", "frequencyDistribution",
                    "prevalenceByGenderAgeYear", "prevalenceByMonth"));
            addAll(getColumnNames("drugConceptId", "byType", "daysSupplyDistribution",
                    "quantityDistribution", "refillsDistribution"));
        }});
        treemapDomains.put("drugera", new ArrayList<Pair<String, String>>() {{
            addAll(getDefaultColumnNames("ageAtFirstOccurrence", "lengthOfEra",
                    "prevalenceByGenderAgeYear", "prevalenceByMonth"));
        }});
        treemapDomains.put("measurement", new ArrayList<Pair<String, String>>() {{
            addAll(getDefaultColumnNames("ageAtFirstOccurrence", "frequencyDistribution",
                    "lowerLimitDistribution", "measurementValueDistribution", "prevalenceByGenderAgeYear",
                    "prevalenceByMonth", "upperLimitDistribution"));
            addAll(getColumnNames("observationConceptId", "byOperator", "byValueAsConcept"));
            addAll(getColumnNames("measurementConceptId", "byType", "recordsByUnit",
                    "valuesRelativeToNorm"));
        }});
        treemapDomains.put("observation", new ArrayList<Pair<String, String>>() {{
            addAll(getDefaultColumnNames("ageAtFirstOccurrence", "frequencyDistribution",
                    "prevalenceByGenderAgeYear", "prevalenceByMonth"));
            addAll(getColumnNames("observationConceptId", "byQualifier", "byType",
                    "byValueAsConcept"));
        }});
        treemapDomains.put("procedure", new ArrayList<Pair<String, String>>() {{
            addAll(getDefaultColumnNames("ageAtFirstOccurrence", "frequencyDistribution",
                    "prevalenceByGenderAgeYear", "prevalenceByMonth"));
            addAll(getColumnNames("procedureConceptId", "byType"));
        }});
        treemapDomains.put("visit", new ArrayList<Pair<String, String>>() {{
            addAll(getDefaultColumnNames("ageAtFirstOccurrence", "prevalenceByGenderAgeYear",
                    "prevalenceByMonth", "visitDurationByType"));
        }});
    }
    public AchillesCacheTasklet(Source source,
                                CDMResultsService service,
                                AchillesCacheService cacheService,
                                CDMResultsAnalysisRunner analysisRunner,
                                ObjectMapper objectMapper) {
        this.source = source;
        this.service = service;
        this.cacheService = cacheService;
        this.analysisRunner = analysisRunner;
        this.objectMapper = objectMapper;
    }
    // prepare list of reports where concept id column name is equal to default one
    private static List<Pair<String, String>> getDefaultColumnNames(String... reports) {
        return getColumnNames(DEFAULT_CONCEPT_ID_NAME, reports);
    }
    // prepare list of reports where concept id column name is custom
    private static List<Pair<String, String>> getColumnNames(String columnName, String... reports) {
        return Arrays.stream(reports)
                .map(report -> new ImmutablePair<>(report, columnName))
                .collect(Collectors.toList());
    }
    // compute and cache per-concept drilldown reports for one treemap domain
    private void cacheDrilldown(String domain) {
        JdbcTemplate jdbcTemplate = service.getSourceJdbcTemplate(source);
        // get drilldown reports for all concepts
        JsonNode reports = analysisRunner.getDrilldown(jdbcTemplate, domain, null, source);
        // get reports for each concept
        Map<Integer, ObjectNode> conceptNodes = splitReportByConcepts(domain, reports);
        cacheService.saveDrilldownCacheMap(source, domain, conceptNodes);
    }
    // distinct concept ids present in the domain's treemap report
    private Set<Integer> getConceptIds(String domain) {
        ArrayNode treeMap = service.getTreemap(domain, source.getSourceKey());
        Stream<JsonNode> nodes = IntStream.range(0, treeMap.size()).mapToObj(treeMap::get);
        return nodes.map(node -> node.get("conceptId").intValue())
                .collect(Collectors.toSet());
    }
    private Map<Integer, ObjectNode> splitReportByConcepts(String domain, JsonNode reports) {
        List<Pair<String, String>> drilldownScripts = treemapDomains.get(domain);
        if (reports.size() != drilldownScripts.size()) {
            throw new RuntimeException("Drilldown reports size must be equal to number of scripts. Check achilles caching!");
        }
        // Fetch the treemap concept ids once per domain (previously re-fetched for
        // every report) and keep them in a Set for O(1) membership checks below.
        Set<Integer> conceptIds = getConceptIds(domain);
        Map<Integer, ObjectNode> conceptNodes = new HashMap<>();
        drilldownScripts.forEach(columnName -> copyItemToConceptReport(conceptIds, reports, conceptNodes, columnName));
        return conceptNodes;
    }
    private void copyItemToConceptReport(Set<Integer> conceptIds, JsonNode reports,
                                         Map<Integer, ObjectNode> conceptNodes, Pair<String, String> columnName) {
        String reportName = columnName.getKey();
        String conceptColumnName = columnName.getValue();
        JsonNode report = reports.get(reportName);
        Objects.requireNonNull(report).forEach(item -> {
            int conceptId = item.get(conceptColumnName).intValue();
            // ignore data for concept which is absent in treemap
            if (conceptIds.contains(conceptId)) {
                ArrayNode reportNode = getReport(conceptNodes, reportName, conceptId);
                reportNode.add(item);
            }
        });
    }
    private ArrayNode getReport(Map<Integer, ObjectNode> conceptNodes, String reportName, int conceptId) {
        // get node for given concept or create new one
        ObjectNode conceptReport = conceptNodes.computeIfAbsent(conceptId, x -> objectMapper.createObjectNode());
        // get concept report with given name or create new one
        ArrayNode reportNode = (ArrayNode) conceptReport.get(reportName);
        if (Objects.isNull(reportNode)) {
            reportNode = conceptReport.putArray(reportName);
        }
        return reportNode;
    }
    // cache a simple-domain report via its dedicated service endpoint
    private void cacheDomain(String domain, BiFunction<CDMResultsService, String, Object> function) {
        Object result = function.apply(service, source.getSourceKey());
        cache(domain, result);
    }
    // cache the raw treemap report for one domain
    private void cacheTreemap(String domain) {
        Object result = service.getRawTreeMap(domain, source.getSourceKey());
        cache(TREEMAP + "_" + domain, result);
    }
    private void cache(String cacheName, Object result) {
        try {
            cacheService.createCache(source, cacheName, result);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Caches simple-domain reports, then treemaps, then drilldown reports.
     */
    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
        simpleDomains.forEach(this::cacheDomain);
        treemapDomains.keySet().forEach(this::cacheTreemap);
        treemapDomains.keySet().forEach(this::cacheDrilldown);
        return RepeatStatus.FINISHED;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/cdmresults/DescendantRecordCount.java | src/main/java/org/ohdsi/webapi/cdmresults/DescendantRecordCount.java | /*
*
* Copyright 2017 Observational Health Data Sciences and Informatics
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.ohdsi.webapi.cdmresults;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
public class DescendantRecordCount {
private int id;
private Long recordCount;
private Long descendantRecordCount;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public Long getRecordCount() {
return recordCount;
}
public void setRecordCount(Long recordCount) {
this.recordCount = recordCount;
}
public Long getDescendantRecordCount() {
return descendantRecordCount;
}
public void setDescendantRecordCount(Long descendantRecordCount) {
this.descendantRecordCount = descendantRecordCount;
}
public List<Long> getValues() {
return Arrays.asList(recordCount, descendantRecordCount);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DescendantRecordCount that = (DescendantRecordCount) o;
return id == that.id;
}
@Override
public int hashCode() {
return Objects.hash(id);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/cdmresults/DescendantRecordAndPersonCount.java | src/main/java/org/ohdsi/webapi/cdmresults/DescendantRecordAndPersonCount.java | /*
*
* Copyright 2017 Observational Health Data Sciences and Informatics
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.ohdsi.webapi.cdmresults;
import java.util.Arrays;
import java.util.List;
public class DescendantRecordAndPersonCount extends DescendantRecordCount {
private Long personCount;
private Long descendantPersonCount;
public Long getPersonCount() {
return personCount;
}
public void setPersonCount(Long personCount) {
this.personCount = personCount;
}
public Long getDescendantPersonCount() {
return descendantPersonCount;
}
public void setDescendantPersonCount(Long descendantPersonCount) {
this.descendantPersonCount = descendantPersonCount;
}
public List<Long> getValues() {
return Arrays.asList(getRecordCount(), getDescendantRecordCount(), personCount, descendantPersonCount);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/cdmresults/service/CDMCacheBatchService.java | src/main/java/org/ohdsi/webapi/cdmresults/service/CDMCacheBatchService.java | package org.ohdsi.webapi.cdmresults.service;
import java.util.ArrayList;
import java.util.Arrays;
import org.ohdsi.webapi.cdmresults.domain.CDMCacheEntity;
import org.ohdsi.webapi.cdmresults.repository.CDMCacheRepository;
import org.ohdsi.webapi.source.Source;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Persists batches of CDM record-count cache entries for a source, updating
 * existing rows only when any of their four counts actually changed.
 */
@Service
public class CDMCacheBatchService {

    private final CDMCacheRepository cdmCacheRepository;
    // NOTE(review): injected and stored but not referenced by the visible code;
    // confirm whether it is still needed before removing.
    private final EntityManager entityManager;

    public CDMCacheBatchService(CDMCacheRepository cdmCacheRepository, EntityManager entityManager) {
        this.cdmCacheRepository = cdmCacheRepository;
        this.entityManager = entityManager;
    }

    /**
     * Merges {@code entities} into the cache for {@code source} in a new
     * transaction. Existing rows whose counts are unchanged are skipped;
     * changed or new rows are saved.
     *
     * @param source   the source the entries belong to
     * @param entities candidate cache entries keyed by concept id
     * @return all cache entities for the affected concept ids (existing plus newly created)
     */
    @Transactional(propagation = Propagation.REQUIRES_NEW)
    public List<CDMCacheEntity> save(Source source, List<CDMCacheEntity> entities) {
        List<Integer> conceptIds = entities.stream()
                .map(CDMCacheEntity::getConceptId)
                .collect(Collectors.toList());
        // Existing cache rows for these concepts, indexed by concept id.
        Map<Integer,CDMCacheEntity> cacheEntities = cdmCacheRepository.findBySourceAndConceptIds(source.getSourceId(), conceptIds)
                .stream()
                .collect(Collectors.toMap(CDMCacheEntity::getConceptId, Function.identity()));
        List<CDMCacheEntity> modified = new ArrayList<>();
        entities.forEach(entity -> {
            // check if the entity with given cache name already exists
            CDMCacheEntity processedEntity;
            if (cacheEntities.containsKey(entity.getConceptId())) {
                processedEntity = cacheEntities.get(entity.getConceptId());
                // Compare all four counts at once.
                // NOTE(review): the long[] construction unboxes each count — this
                // throws NPE if any count is null; confirm counts are always set.
                if (Arrays.equals(new long[] { entity.getPersonCount(),entity.getDescendantPersonCount(),entity.getRecordCount(),entity.getDescendantRecordCount()},
                        new long[] {processedEntity.getPersonCount(),processedEntity.getDescendantPersonCount(),processedEntity.getRecordCount(),processedEntity.getDescendantRecordCount()})) {
                    return; // data hasn't changed, so move to next in forEach
                }
            } else {
                // if cache entity does not exist - create new one
                processedEntity = new CDMCacheEntity();
                processedEntity.setConceptId(entity.getConceptId());
                processedEntity.setSourceId(source.getSourceId());
                cacheEntities.put(processedEntity.getConceptId(), processedEntity);
            }
            // Copy the new counts onto the managed/new entity and mark it for save.
            processedEntity.setPersonCount(entity.getPersonCount());
            processedEntity.setDescendantPersonCount(entity.getDescendantPersonCount());
            processedEntity.setRecordCount(entity.getRecordCount());
            processedEntity.setDescendantRecordCount(entity.getDescendantRecordCount());
            modified.add(processedEntity);
        });
        if (!modified.isEmpty()) {
            cdmCacheRepository.save(modified);
        }
        return new ArrayList<>( cacheEntities.values());
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/cdmresults/service/CDMCacheService.java | src/main/java/org/ohdsi/webapi/cdmresults/service/CDMCacheService.java | package org.ohdsi.webapi.cdmresults.service;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.ohdsi.webapi.cdmresults.DescendantRecordCount;
import org.ohdsi.webapi.cdmresults.domain.CDMCacheEntity;
import org.ohdsi.webapi.cdmresults.mapper.BaseRecordCountMapper;
import org.ohdsi.webapi.cdmresults.mapper.DescendantRecordAndPersonCountMapper;
import org.ohdsi.webapi.cdmresults.mapper.DescendantRecordCountMapper;
import org.ohdsi.webapi.cdmresults.repository.CDMCacheRepository;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.ohdsi.webapi.shiro.management.datasource.SourceAccessor;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.util.PreparedSqlRender;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.ohdsi.webapi.util.SessionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.convert.ConversionService;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@Service
public class CDMCacheService extends AbstractDaoService {
private static final int COUNTS_BATCH_SIZE = 1_000_000;
private static final int COUNTS_BATCH_THRESHOLD = 250_000;
private static final String CONCEPT_SQL = "/resources/cdmresults/sql/getConceptsForBatch.sql";
private static final String PARTIAL_CONCEPT_COUNT_SQL = "/resources/cdmresults/sql/getConceptRecordCount.sql";
private static final String PARTIAL_CONCEPT_COUNT_PERSON_SQL = "/resources/cdmresults/sql/getConceptRecordPersonCount.sql";
private static final String BATCH_CONCEPT_COUNT_SQL = "/resources/cdmresults/sql/getBatchConceptRecordCount.sql";
private static final String BATCH_CONCEPT_COUNT_PERSON_SQL = "/resources/cdmresults/sql/getBatchConceptRecordPersonCount.sql";
@Value("${spring.jpa.properties.hibernate.jdbc.batch_size}")
private int batchSize;
@Value("${cache.achilles.usePersonCount:false}")
private boolean usePersonCount;
private final CDMCacheBatchService cdmCacheBatchService;
private final CDMCacheRepository cdmCacheRepository;
private final ConversionService conversionService;
@Autowired
private SourceAccessor sourceAccessor;
/**
 * @param cdmCacheBatchService batch writer used to persist cache entries
 * @param conversionService    Spring conversion service
 * @param cdmCacheRepository   repository for cached record counts
 */
public CDMCacheService(CDMCacheBatchService cdmCacheBatchService,
                       ConversionService conversionService,
                       CDMCacheRepository cdmCacheRepository) {
    this.cdmCacheRepository = cdmCacheRepository;
    this.conversionService = conversionService;
    this.cdmCacheBatchService = cdmCacheBatchService;
}
/**
 * Warms the record-count cache for the given source inside a new transaction.
 * Failures are logged and swallowed so that one broken source does not abort
 * the caller (e.g. a cache-warming job over all sources).
 *
 * @param source the CDM source whose cache should be populated
 */
public void warm(Source source) {
    try {
        TransactionTemplate transactionTemplate = getTransactionTemplateRequiresNew();
        transactionTemplate.execute(s -> {
            // Create full cache
            cacheRecordsBatch(source);
            return null;
        });
    } catch (Exception ex) {
        // Pass the exception itself so SLF4J logs the full stack trace,
        // not just the (possibly empty) localized message.
        log.error("Failed to warm cache {}", source.getSourceKey(), ex);
    }
}
/**
 * Returns cache entities for the requested concept ids, computing and caching
 * any that are not present yet.
 *
 * @param source     the CDM source to read from
 * @param conceptIds concept ids to resolve; may be empty or null
 * @return cached entities for all requested ids (empty list for empty input)
 */
public List<CDMCacheEntity> findAndCache(Source source, List<Integer> conceptIds) {
    if (CollectionUtils.isEmpty(conceptIds)) {
        return Collections.emptyList();
    }
    List<CDMCacheEntity> cached = find(source, conceptIds);
    List<Integer> cachedIds = cached.stream()
            .map(CDMCacheEntity::getConceptId)
            .collect(Collectors.toList());
    // A size mismatch means some requested concepts are not cached yet:
    // compute those from the source database, then re-read them.
    if (cached.size() != conceptIds.size()) {
        List<Integer> missingIds = new ArrayList<>(CollectionUtils.subtract(conceptIds, cachedIds));
        cacheRecordsById(source, missingIds);
        cached.addAll(find(source, missingIds));
    }
    return cached;
}
/**
 * Clears the cached record counts for every configured source the current
 * user has access to (the per-source overload performs the access check).
 */
@Transactional
public void clearCache() {
    List<Source> sources = getSourceRepository().findAll();
    // Plain forEach — no need for the intermediate stream().
    sources.forEach(this::clearCache);
}
/**
 * Clears cached record counts for a single source; silently does nothing when
 * the current user has no access to it.
 *
 * @param source the source whose cache entries should be deleted
 */
@Transactional
public void clearCache(Source source) {
    if (!sourceAccessor.hasAccess(source)) {
        return;
    }
    cdmCacheRepository.deleteBySource(source.getSourceId());
}
/**
 * Loads cache entities for the given concept ids, querying in slices so the
 * dialect's JDBC parameter limit is never exceeded.
 *
 * @param source     the source whose cache is queried
 * @param conceptIds concept ids to look up; may be empty or null
 * @return all cache entities found for the requested ids
 */
private List<CDMCacheEntity> find(Source source, List<Integer> conceptIds) {
    if (CollectionUtils.isEmpty(conceptIds)) {
        return Collections.emptyList();
    }
    List<CDMCacheEntity> result = new ArrayList<>();
    int chunk = PreparedSqlRender.getParameterLimit(source);
    for (int from = 0, size = conceptIds.size(); from < size; from += chunk) {
        int to = Math.min(from + chunk, size);
        result.addAll(cdmCacheRepository.findBySourceAndConceptIds(source.getSourceId(), conceptIds.subList(from, to)));
    }
    return result;
}
/**
 * Fully warms the cache for a source: asks the CDM for min/max concept-id
 * batch boundaries, then loads and persists the counts batch by batch.
 */
private void cacheRecordsBatch(Source source) {
    // Pick the mapper matching the configured count flavour (record-only vs record+person).
    BaseRecordCountMapper<?> mapper = this.usePersonCount ? new DescendantRecordAndPersonCountMapper() : new DescendantRecordCountMapper();
    JdbcTemplate jdbcTemplate = this.getSourceJdbcTemplate(source);
    // Full cache
    // Make sure that query returns ordered collection of ids or sort it after query is executed
    PreparedStatementRenderer cpsr = getConceptPreparedStatementRenderer(source);
    List<Pair<Integer, Integer>> minMaxPairs = jdbcTemplate.query(cpsr.getSql(), cpsr.getSetter(), (rs, rowNum) -> {
        return new ImmutablePair<>(rs.getInt("min_concept"), rs.getInt("max_concept"));
    });
    // One count query per (min, max) concept-id range.
    minMaxPairs.forEach(pair -> {
        PreparedStatementRenderer psr = getBatchPreparedStatementRenderer(source, pair.getLeft(), pair.getRight());
        loadCache(source, psr, mapper, jdbcTemplate);
    });
}
/**
 * Loads counts for the given concept ids from the CDM and persists them.
 * Ids that yield no row at all are stored with zero counts so they are not
 * queried against the CDM again on subsequent requests.
 */
private void cacheRecordsById(Source source, List<Integer> ids) {
    BaseRecordCountMapper<?> mapper = this.usePersonCount ? new DescendantRecordAndPersonCountMapper() : new DescendantRecordCountMapper();
    JdbcTemplate jdbcTemplate = this.getSourceJdbcTemplate(source);
    // In case of getting records for concrete concepts we must call the query for small pieces of
    // the array of concept identifiers
    // places in the target query so the parameter limit will need to be divided
    int parameterLimit = PreparedSqlRender.getParameterLimit(source);
    int start = 0, size = ids.size();
    while (start < size) {
        int end = Math.min(start + parameterLimit, size);
        List<Integer> idsSlice = ids.subList(start, end);
        start += parameterLimit;
        PreparedStatementRenderer psr = getPartialPreparedStatementRenderer(source, idsSlice);
        Set<Integer> cachedIds = loadCache(source, psr, mapper, jdbcTemplate);
        // in this batch, need to save any concepts that were not found when loading cache
        List<Integer> notFoundIds = new ArrayList<Integer>(CollectionUtils.subtract(idsSlice, cachedIds));
        if (!notFoundIds.isEmpty()) { // store zeros in cache
            // Materialize zero-count placeholder rows for ids with no data.
            List<CDMCacheEntity> zeroConcepts = notFoundIds.stream().map(id -> {
                CDMCacheEntity ce = new CDMCacheEntity();
                ce.setConceptId(id);
                ce.setRecordCount(0L);
                ce.setDescendantRecordCount(0L);
                ce.setPersonCount(0L);
                ce.setDescendantPersonCount(0L);
                return ce;
            }).collect(Collectors.toList());
            cdmCacheBatchService.save(source, zeroConcepts); // zero concepts will be less than batch size
        }
    }
}
/**
 * Streams the rows of the given count query and persists them to the cache
 * table in batches of 2000.
 *
 * @return the set of concept ids that actually produced a row
 */
private Set<Integer> loadCache(Source source,
                               PreparedStatementRenderer psr,
                               BaseRecordCountMapper<?> mapper,
                               JdbcTemplate jdbcTemplate) {
    int BATCH_SIZE = 2000;
    List<CDMCacheEntity> rows = new ArrayList<>(BATCH_SIZE);
    Set<Integer> cachedIds = new HashSet<>();
    // NOTE(review): this mutates the fetch size of the passed template instance —
    // assumes the template is source-scoped and not shared concurrently; confirm.
    jdbcTemplate.setFetchSize(BATCH_SIZE);
    jdbcTemplate.query(psr.getSql(), psr.getSetter(), resultSet -> {
        DescendantRecordCount row = mapper.mapRow(resultSet);
        CDMCacheEntity cacheEntity = conversionService.convert(row, CDMCacheEntity.class);
        cachedIds.add(cacheEntity.getConceptId());
        rows.add(cacheEntity);
        if (rows.size() == BATCH_SIZE) {
            // Persist or merge batch
            cdmCacheBatchService.save(source, rows);
            rows.clear();
        }
    });
    // Flush the final, partially-filled batch.
    if (!rows.isEmpty()) {
        cdmCacheBatchService.save(source, rows);
    }
    return cachedIds;
}
/**
 * Builds the renderer for CONCEPT_SQL, which partitions the concept-id space
 * into batches of 100,000 ids (min/max pairs) used for full-cache warming.
 */
private PreparedStatementRenderer getConceptPreparedStatementRenderer(Source source) {
    String[] searchNames = {"resultTableQualifier"};
    String[] replacements = {source.getTableQualifier(SourceDaimon.DaimonType.Results)};
    String[] paramNames = {"batch_size"};
    Object[] paramValues = {100000};
    return new PreparedStatementRenderer(source, CONCEPT_SQL, searchNames, replacements,
            paramNames, paramValues, SessionUtils.sessionId());
}
/**
 * Builds the renderer that loads counts for an explicit list of concept ids.
 * Uses the person-count flavour of the query when person counts are enabled.
 */
private PreparedStatementRenderer getPartialPreparedStatementRenderer(Source source, List<Integer> ids) {
    String[] searchNames = {"resultTableQualifier", "vocabularyTableQualifier"};
    String[] replacements = {
            source.getTableQualifier(SourceDaimon.DaimonType.Results),
            source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary)
    };
    // Caching of concrete concepts
    String sqlPath = this.usePersonCount ? PARTIAL_CONCEPT_COUNT_PERSON_SQL : PARTIAL_CONCEPT_COUNT_SQL;
    return new PreparedStatementRenderer(source, sqlPath, searchNames, replacements,
            "conceptIdentifiers", ids.toArray(new Integer[0]));
}
/**
 * Builds the renderer that loads counts for one [min, max] concept-id range
 * during full-cache warming. Uses the person-count flavour of the query
 * when person counts are enabled.
 */
private PreparedStatementRenderer getBatchPreparedStatementRenderer(Source source, int min, int max) {
    String[] searchNames = {"resultTableQualifier", "vocabularyTableQualifier"};
    String[] replacements = {
            source.getTableQualifier(SourceDaimon.DaimonType.Results),
            source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary)
    };
    // Caching of concrete concepts
    String sqlPath = this.usePersonCount ? BATCH_CONCEPT_COUNT_PERSON_SQL : BATCH_CONCEPT_COUNT_SQL;
    return new PreparedStatementRenderer(source, sqlPath, searchNames, replacements,
            new String[]{"conceptIdentifierMin", "conceptIdentifierMax"},
            new Integer[]{min, max});
}
}
/*
*
* Copyright 2017 Observational Health Data Sciences and Informatics
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.ohdsi.webapi.cdmresults.cache;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.collections4.CollectionUtils;
import org.eclipse.collections.impl.map.mutable.ConcurrentHashMapUnsafe;
import org.hibernate.internal.util.collections.BoundedConcurrentHashMap;
import org.ohdsi.webapi.cdmresults.DescendantRecordCount;
/**
 * Caches {@code DescendantRecordCount} values per concept id, and additionally
 * remembers which ids were requested but produced no value, so each missing id
 * is queried against the database at most once.
 *
 * <p>Thread-safety: backed by concurrent maps; {@code warm} is volatile so a
 * warm-up performed on one thread is visible to reader threads.
 *
 * @author fdefalco, ymolodkov
 */
public class CDMResultsCache {

    // BoundedConcurrentHashMap is Hibernate's LRU (least-recently-used) cache map.
    // It supports concurrency out of the box and does not block the get operation.
    // Capacity 1,000,000 is significant, but should be only 20-25mb for 8-digit ids.
    private final Set<Integer> requestedIdsThatDoesNotHaveValueInStorage =
            Collections.newSetFromMap(new BoundedConcurrentHashMap<>(1_000_000));

    private final Map<Integer, DescendantRecordCount> cachedValues = new ConcurrentHashMapUnsafe<>();

    // volatile: written once by the warming thread, read by request threads.
    private volatile boolean warm;

    /**
     * Returns record counts for the given ids, serving what it can from the
     * cache and fetching the remainder through {@code getRecordsFromQueryFunction}.
     */
    public Collection<DescendantRecordCount> findAndCache(List<Integer> ids, Function<List<Integer>, List<DescendantRecordCount>> getRecordsFromQueryFunction) {
        if (CollectionUtils.isEmpty(ids)) {
            return Collections.emptyList();
        }
        List<Integer> notRequestedRecordIds = ids.stream().filter(this::notRequested)
                .collect(Collectors.toList());
        List<DescendantRecordCount> recordsFromCache = ids.stream().map(this::get).filter(Objects::nonNull)
                .collect(Collectors.toList());
        // A warm cache is complete: ids without a value simply have no counts.
        if (this.isWarm() || CollectionUtils.isEmpty(notRequestedRecordIds)) {
            return recordsFromCache;
        }
        List<DescendantRecordCount> recordsFromQuery = getRecordsFromQueryFunction.apply(notRequestedRecordIds);
        recordsFromQuery = CollectionUtils.isNotEmpty(recordsFromQuery) ? recordsFromQuery : Collections.emptyList();
        this.cacheValues(recordsFromQuery);
        this.cacheRequestedIds(ids);
        return CollectionUtils.union(recordsFromCache, recordsFromQuery);
    }

    /** Marks the cache as fully populated. */
    public void warm() {
        warm = true;
    }

    public boolean isWarm() {
        return warm;
    }

    public boolean notWarm() {
        return !warm;
    }

    public void cacheValue(DescendantRecordCount value) {
        cachedValues.put(value.getId(), value);
    }

    public void cacheValues(Collection<DescendantRecordCount> values) {
        values.forEach(this::cacheValue);
    }

    public DescendantRecordCount get(Integer id) {
        return cachedValues.get(id);
    }

    /** Remembers that an id was requested but has no cached value (yet). */
    protected void cacheRequestedId(Integer id) {
        if (cachedValues.containsKey(id)) {
            return;
        }
        requestedIdsThatDoesNotHaveValueInStorage.add(id);
    }

    protected void cacheRequestedIds(Collection<Integer> values) {
        values.forEach(this::cacheRequestedId);
    }

    protected boolean isRequested(Integer id) {
        return cachedValues.containsKey(id) || requestedIdsThatDoesNotHaveValueInStorage.contains(id);
    }

    protected boolean notRequested(Integer id) {
        return !isRequested(id);
    }
}
package org.ohdsi.webapi.cdmresults.domain;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
/**
 * JPA entity for one row of the cdm_cache table: the cached record/person
 * counts of a single concept within a single source.
 */
@Entity
@Table(name = "cdm_cache")
public class CDMCacheEntity {

    @Id
    @SequenceGenerator(name = "cdm_cache_seq", sequenceName = "cdm_cache_seq", allocationSize = 1)
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "cdm_cache_seq")
    private Long id;

    // OMOP concept this cache row belongs to
    @Column(name = "concept_id")
    private int conceptId;

    // Source (CDM database) this cache row belongs to
    @Column(name = "source_id")
    private int sourceId;

    @Column(name = "record_count")
    private Long recordCount;

    // Count including records of the concept's descendants
    @Column(name = "descendant_record_count")
    private Long descendantRecordCount;

    @Column(name = "person_count")
    private Long personCount;

    @Column(name = "descendant_person_count")
    private Long descendantPersonCount;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public int getConceptId() {
        return conceptId;
    }

    public void setConceptId(int conceptId) {
        this.conceptId = conceptId;
    }

    public int getSourceId() {
        return sourceId;
    }

    public void setSourceId(int sourceId) {
        this.sourceId = sourceId;
    }

    public Long getRecordCount() {
        return recordCount;
    }

    public void setRecordCount(Long recordCount) {
        this.recordCount = recordCount;
    }

    public Long getDescendantRecordCount() {
        return descendantRecordCount;
    }

    public void setDescendantRecordCount(Long descendantRecordCount) {
        this.descendantRecordCount = descendantRecordCount;
    }

    public Long getPersonCount() {
        return personCount;
    }

    public void setPersonCount(Long personCount) {
        this.personCount = personCount;
    }

    public Long getDescendantPersonCount() {
        return descendantPersonCount;
    }

    public void setDescendantPersonCount(Long descendantPersonCount) {
        this.descendantPersonCount = descendantPersonCount;
    }
}
package org.ohdsi.webapi.cdmresults.repository;
import org.ohdsi.webapi.cdmresults.domain.CDMCacheEntity;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Spring Data repository over the cdm_cache table of cached per-concept counts.
 */
@Repository
public interface CDMCacheRepository extends CrudRepository<CDMCacheEntity, Long> {

    /** Returns cache rows of the given source restricted to the given concept ids. */
    @Query("select c from CDMCacheEntity c where c.sourceId = :sourceId and c.conceptId in :conceptIds")
    List<CDMCacheEntity> findBySourceAndConceptIds(@Param("sourceId") int sourceId, @Param("conceptIds") List<Integer> conceptIds);

    /** Deletes all cache rows of the given source; callers run this inside a transaction. */
    @Modifying
    @Query("delete from CDMCacheEntity c where c.sourceId = :sourceId")
    void deleteBySource(@Param("sourceId") int sourceId);
}
package org.ohdsi.webapi.cdmresults.converter;
import org.ohdsi.webapi.cdmresults.DescendantRecordAndPersonCount;
import org.ohdsi.webapi.cdmresults.domain.CDMCacheEntity;
import org.springframework.stereotype.Component;
@Component
public class PersonCountToCDMCacheConverter extends RecordCountToCDMCacheConverter<DescendantRecordAndPersonCount> {

    /**
     * Converts a record-and-person-count DTO into a cache entity: the base
     * class copies the record counts, this class adds the person counts.
     */
    @Override
    public CDMCacheEntity convert(DescendantRecordAndPersonCount s) {
        CDMCacheEntity entity = super.convert(s);
        entity.setPersonCount(s.getPersonCount());
        entity.setDescendantPersonCount(s.getDescendantPersonCount());
        return entity;
    }
}
package org.ohdsi.webapi.cdmresults.converter;
import org.ohdsi.webapi.cdmresults.DescendantRecordCount;
import org.springframework.stereotype.Component;
@Component
public class CDMCacheToRecordCountConverter
extends BaseCDMCacheToRecordCountConverter<DescendantRecordCount> {
protected DescendantRecordCount getResultObject() {
return new DescendantRecordCount();
}
}
package org.ohdsi.webapi.cdmresults.converter;
import org.ohdsi.webapi.cdmresults.DescendantRecordCount;
import org.ohdsi.webapi.cdmresults.domain.CDMCacheEntity;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
import org.springframework.stereotype.Component;
@Component
public class RecordCountToCDMCacheConverter<S extends DescendantRecordCount>
        extends BaseConversionServiceAwareConverter<S, CDMCacheEntity> {

    /** Maps the record-count fields shared by all count DTOs onto a new cache entity. */
    @Override
    public CDMCacheEntity convert(S s) {
        CDMCacheEntity entity = new CDMCacheEntity();
        entity.setConceptId(s.getId());
        entity.setRecordCount(s.getRecordCount());
        entity.setDescendantRecordCount(s.getDescendantRecordCount());
        return entity;
    }
}
package org.ohdsi.webapi.cdmresults.converter;
import org.ohdsi.webapi.cdmresults.DescendantRecordAndPersonCount;
import org.ohdsi.webapi.cdmresults.domain.CDMCacheEntity;
import org.springframework.stereotype.Component;
@Component
public class CDMCacheToPersonCountConverter
        extends BaseCDMCacheToRecordCountConverter<DescendantRecordAndPersonCount> {

    /** Copies the base record counts, then adds the person counts. */
    @Override
    public DescendantRecordAndPersonCount convert(CDMCacheEntity s) {
        DescendantRecordAndPersonCount target = super.convert(s);
        target.setPersonCount(s.getPersonCount());
        target.setDescendantPersonCount(s.getDescendantPersonCount());
        return target;
    }

    /** Supplies the concrete result instance for the base converter. */
    @Override // added: this implements the abstract hook of the base class
    protected DescendantRecordAndPersonCount getResultObject() {
        return new DescendantRecordAndPersonCount();
    }
}
package org.ohdsi.webapi.cdmresults.converter;
import org.ohdsi.webapi.cdmresults.DescendantRecordCount;
import org.ohdsi.webapi.cdmresults.domain.CDMCacheEntity;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
/**
 * Base converter from a cache entity to a record-count DTO. Copies the fields
 * shared by all count DTOs; subclasses supply the concrete DTO instance and
 * may add further fields on top.
 */
public abstract class BaseCDMCacheToRecordCountConverter<T extends DescendantRecordCount>
        extends BaseConversionServiceAwareConverter<CDMCacheEntity, T> {

    @Override
    public T convert(CDMCacheEntity s) {
        T target = getResultObject();
        target.setId(s.getConceptId());
        target.setRecordCount(s.getRecordCount());
        target.setDescendantRecordCount(s.getDescendantRecordCount());
        return target;
    }

    /** Supplies a fresh, empty DTO to populate. */
    protected abstract T getResultObject();
}
/*
*
* Copyright 2017 Observational Health Data Sciences and Informatics
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.ohdsi.webapi.cdmresults.mapper;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.ohdsi.webapi.cdmresults.DescendantRecordCount;
public class DescendantRecordCountMapper extends BaseRecordCountMapper<DescendantRecordCount> {
protected DescendantRecordCount getResultObject() {
return new DescendantRecordCount();
}
}
/*
*
* Copyright 2017 Observational Health Data Sciences and Informatics
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.ohdsi.webapi.cdmresults.mapper;
import org.ohdsi.webapi.cdmresults.DescendantRecordAndPersonCount;
import org.ohdsi.webapi.cdmresults.DescendantRecordCount;
import java.sql.ResultSet;
import java.sql.SQLException;
public class DescendantRecordAndPersonCountMapper extends BaseRecordCountMapper<DescendantRecordAndPersonCount> {
public DescendantRecordAndPersonCount mapRow(ResultSet rs)
throws SQLException {
DescendantRecordAndPersonCount result = super.mapRow(rs);
result.setPersonCount(rs.getLong("person_count"));
result.setDescendantPersonCount(rs.getLong("descendant_person_count"));
return result;
}
protected DescendantRecordAndPersonCount getResultObject() {
return new DescendantRecordAndPersonCount();
}
}
/*
*
* Copyright 2017 Observational Health Data Sciences and Informatics
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.ohdsi.webapi.cdmresults.mapper;
import org.ohdsi.webapi.cdmresults.DescendantRecordCount;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Base mapper reading the count columns shared by all record-count queries
 * (concept_id, record_count, descendant_record_count) into a DTO supplied by
 * the concrete subclass.
 */
public abstract class BaseRecordCountMapper<T extends DescendantRecordCount> {

    /**
     * Maps the current row of the result set; does not advance the cursor.
     *
     * @throws SQLException if a column is missing or cannot be read
     */
    public T mapRow(ResultSet rs)
            throws SQLException {
        T descendantRecordCount = getResultObject();
        descendantRecordCount.setId(rs.getInt("concept_id"));
        descendantRecordCount.setRecordCount(rs.getLong("record_count"));
        descendantRecordCount.setDescendantRecordCount(rs.getLong("descendant_record_count"));
        return descendantRecordCount;
    }

    /** Supplies a fresh, empty result DTO to populate. */
    protected abstract T getResultObject();
}
package org.ohdsi.webapi.estimation;
import com.odysseusinc.arachne.commons.utils.ConverterUtils;
import com.qmino.miredot.annotations.MireDotIgnore;
import com.qmino.miredot.annotations.ReturnType;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.check.CheckResult;
import org.ohdsi.webapi.check.checker.estimation.EstimationChecker;
import org.ohdsi.webapi.common.SourceMapKey;
import org.ohdsi.webapi.common.generation.ExecutionBasedGenerationDTO;
import org.ohdsi.webapi.common.sensitiveinfo.CommonGenerationSensitiveInfoService;
import org.ohdsi.webapi.estimation.domain.EstimationGenerationEntity;
import org.ohdsi.webapi.estimation.dto.EstimationDTO;
import org.ohdsi.webapi.estimation.dto.EstimationShortDTO;
import org.ohdsi.webapi.estimation.specification.EstimationAnalysisImpl;
import org.ohdsi.webapi.executionengine.service.ScriptExecutionService;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.security.PermissionService;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.stereotype.Controller;
import javax.transaction.Transactional;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* Provides REST services for working with
* population-level estimation designs.
*
* @summary Estimation
*/
@Controller
@Path("/estimation/")
public class EstimationController {
private static final Logger LOGGER = LoggerFactory.getLogger(EstimationController.class);

// Format templates for not-found error messages.
private static final String NO_ESTIMATION_MESSAGE = "There is no estimation with id = %d.";
private static final String NO_GENERATION_MESSAGE = "There is no generation with id = %d";

// Collaborators, injected through the constructor.
private final EstimationService service;
private final GenericConversionService conversionService;
private final CommonGenerationSensitiveInfoService<ExecutionBasedGenerationDTO> sensitiveInfoService;
private final SourceService sourceService;
private final ConverterUtils converterUtils;
private final ScriptExecutionService executionService;
private EstimationChecker checker;
private PermissionService permissionService;

// When true, all users may read every estimation design without per-entity checks.
@Value("${security.defaultGlobalReadPermissions}")
private boolean defaultGlobalReadPermissions;
/**
 * Constructor-injects the services used for estimation CRUD, conversion,
 * permission checks and generation.
 */
public EstimationController(EstimationService service,
                            GenericConversionService conversionService,
                            CommonGenerationSensitiveInfoService sensitiveInfoService,
                            SourceService sourceService,
                            ConverterUtils converterUtils,
                            ScriptExecutionService executionService, EstimationChecker checker,
                            PermissionService permissionService) {
    this.service = service;
    this.conversionService = conversionService;
    this.sensitiveInfoService = sensitiveInfoService;
    this.sourceService = sourceService;
    this.converterUtils = converterUtils;
    this.executionService = executionService;
    this.checker = checker;
    this.permissionService = permissionService;
}
/**
 * Used to retrieve all estimation designs in the WebAPI database.
 * @summary Get all estimation designs
 * @return A list of all estimation design names and identifiers
 */
@GET
@Path("/")
@Produces(MediaType.APPLICATION_JSON)
public List<EstimationShortDTO> getAnalysisList() {
    return StreamSupport.stream(service.getAnalysisList().spliterator(), false)
            // With global read permissions everything is listed; otherwise check per entity.
            .filter(entity -> defaultGlobalReadPermissions || permissionService.hasReadAccess(entity))
            .map(analysis -> {
                EstimationShortDTO dto = conversionService.convert(analysis, EstimationShortDTO.class);
                permissionService.fillWriteAccess(analysis, dto);
                permissionService.fillReadAccess(analysis, dto);
                return dto;
            })
            .collect(Collectors.toList());
}
/**
 * Check to see if an estimation design exists by name
 *
 * @summary Estimation design exists by name
 * @param id The estimation design id
 * @param name The estimation design name
 * @return 1 if an estimation design with the given name and id exist in WebAPI and 0 otherwise
 */
// NOTE(review): the method name suggests this returns a count of designs sharing the
// name (excluding this id) rather than a strict 0/1 — confirm against EstimationService.
@GET
@Path("/{id}/exists")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public int getCountEstimationWithSameName(@PathParam("id") @DefaultValue("0") final int id, @QueryParam("name") String name) {
    return service.getCountEstimationWithSameName(id, name);
}
/**
 * Used to delete a selected estimation design by ID.
 *
 * @summary Delete an estimation design
 * @param id The identifier of the estimation design
 */
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}")
public void delete(@PathParam("id") final int id) {
    // Deletion is delegated entirely to the service layer.
    service.delete(id);
}
/**
 * Used to add a new estimation design to the database
 *
 * @summary Save a new estimation design
 * @param est The estimation design object
 * @return An EstimationDTO which contains the identifier assigned to the estimation design.
 * @throws Exception if the design cannot be persisted or re-read
 */
@POST
@Path("/")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public EstimationDTO createEstimation(Estimation est) throws Exception {
    Estimation estWithId = service.createEstimation(est);
    // Re-read the persisted entity so the DTO reflects generated fields (id, audit data).
    return reloadAndConvert(estWithId.getId());
}
/**
 * Used to save changes to an existing estimation design by ID.
 *
 * @summary Update an estimation design
 * @param id The ID of the estimation design
 * @param est The estimation design object
 * @return An EstimationDTO which contains the updated estimation design.
 * @throws Exception if the design cannot be persisted or re-read
 */
@PUT
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public EstimationDTO updateEstimation(@PathParam("id") final int id, Estimation est) throws Exception {
    service.updateEstimation(id, est);
    // Re-read so the returned DTO reflects the stored state, not the request payload.
    return reloadAndConvert(id);
}
/**
 * Used to create a copy of an existing estimation design by ID.
 *
 * @summary Copy an estimation design
 * @param id The ID of the estimation design
 * @return An EstimationDTO which contains the newly copied estimation design.
 * @throws Exception if the copy cannot be persisted or re-read
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/copy")
@Transactional
public EstimationDTO copy(@PathParam("id") final int id) throws Exception {
    Estimation est = service.copy(id);
    return reloadAndConvert(est.getId());
}
/**
 * Used to retrieve an existing estimation design by ID.
 *
 * @summary Get an estimation design by ID
 * @param id The ID of the estimation design
 * @return An EstimationDTO which contains the estimation design.
 */
@GET
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
public EstimationDTO getAnalysis(@PathParam("id") int id) {
    Estimation est = service.getAnalysis(id);
    // Responds 404 when no design exists with this id.
    ExceptionUtils.throwNotFoundExceptionIfNull(est, String.format(NO_ESTIMATION_MESSAGE, id));
    return conversionService.convert(est, EstimationDTO.class);
}
/**
 * Used to export an existing estimation design by ID. This is used
 * when transferring the object outside of WebAPI.
 *
 * @summary Export an estimation design
 * @param id The ID of the estimation design
 * @return An EstimationAnalysisImpl which resolves all references to cohorts, concept sets, etc
 * and contains the full estimation design for export.
 */
@GET
@Path("{id}/export")
@Produces(MediaType.APPLICATION_JSON)
@ReturnType("java.lang.Object")
public EstimationAnalysisImpl exportAnalysis(@PathParam("id") int id) {
    Estimation estimation = service.getAnalysis(id);
    // Responds 404 when no design exists with this id.
    ExceptionUtils.throwNotFoundExceptionIfNull(estimation, String.format(NO_ESTIMATION_MESSAGE, id));
    return service.exportAnalysis(estimation);
}
/**
 * Import a full estimation design
 *
 * @summary Import an estimation design
 * @param analysis The full estimation design
 * @return The newly imported estimation
 * @throws Exception if the design cannot be persisted or re-read
 */
@POST
@Path("import")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@MireDotIgnore // @BodyType("java.lang.Object") doesn't fix the issue
public EstimationDTO importAnalysis(EstimationAnalysisImpl analysis) throws Exception {
    // Direct null check is the idiomatic form; Objects.isNull is meant for predicates.
    if (analysis == null) {
        // NOTE(review): a 400 Bad Request would describe a missing/invalid body better
        // than 500, but the status is kept to preserve the existing API contract.
        LOGGER.error("Failed to import Estimation, empty or not valid source JSON");
        throw new InternalServerErrorException();
    }
    Estimation importedEstimation = service.importAnalysis(analysis);
    return reloadAndConvert(importedEstimation.getId());
}
/**
 * Download an R package to execute the estimation study
 *
 * @summary Download an estimation R package
 * @param id The id for the estimation study
 * @param packageName The R package name for the study; defaults to "estimation<id>" when omitted
 * @return Binary zip file containing the full R package
 * @throws IOException if hydrating the analysis into the package fails
 */
@GET
@Path("{id}/download")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response download(@PathParam("id") int id, @QueryParam("packageName") String packageName) throws IOException {
    if (packageName == null) {
        // String concatenation already converts the id; String.valueOf was redundant.
        packageName = "estimation" + id;
    }
    EstimationAnalysisImpl analysis = this.exportAnalysis(id);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    service.hydrateAnalysis(analysis, packageName, baos);
    return Response
            .ok(baos)
            .type(MediaType.APPLICATION_OCTET_STREAM)
            .header("Content-Disposition", String.format("attachment; filename=\"estimation_%d.zip\"", id))
            .build();
}
/**
 * Generates an estimation design by ID on a specific sourceKey. Please note
 * this requires configuration of the Arachne Execution Engine.
 *
 * @summary Generate an estimation on a selected source
 * @param analysisId The id for the estimation study
 * @param sourceKey The CDM source key
 * @return JobExecutionResource The job information
 * @throws IOException
 */
@POST
@Path("{id}/generation/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource runGeneration(@PathParam("id") Integer analysisId,
                                          @PathParam("sourceKey") String sourceKey) throws IOException {
    final Estimation analysis = service.getAnalysis(analysisId);
    ExceptionUtils.throwNotFoundExceptionIfNull(analysis, String.format(NO_ESTIMATION_MESSAGE, analysisId));
    // Refuse to generate a design that fails its own diagnostics
    final EstimationDTO estimationDTO = conversionService.convert(analysis, EstimationDTO.class);
    final CheckResult checkResult = runDiagnostics(estimationDTO);
    if (checkResult.hasCriticalErrors()) {
        throw new RuntimeException("Cannot be generated due to critical errors in design. Call 'check' service for further details");
    }
    return service.runGeneration(analysis, sourceKey);
}
/**
 * Gets the list of generations for the selected estimation design.
 *
 * @summary Get generations for an estimation design
 * @param analysisId The id for the estimation design
 * @return List<ExecutionBasedGenerationDTO> The list of generations
 */
@GET
@Path("{id}/generation")
@Produces(MediaType.APPLICATION_JSON)
public List<ExecutionBasedGenerationDTO> getGenerations(@PathParam("id") Integer analysisId) {
    final Map<String, Source> sourcesByKey = sourceService.getSourcesMap(SourceMapKey.BY_SOURCE_KEY);
    final List<ExecutionBasedGenerationDTO> generations =
            converterUtils.convertList(service.getEstimationGenerations(analysisId), ExecutionBasedGenerationDTO.class);
    // Mask source-sensitive details on each generation before returning
    return sensitiveInfoService.filterSensitiveInfo(generations,
            info -> Collections.singletonMap(Constants.Variables.SOURCE, sourcesByKey.get(info.getSourceKey())));
}
/**
 * Gets generation info for a single estimation generation.
 *
 * @summary Get estimation design generation info
 * @param generationId The id for the estimation generation
 * @return ExecutionBasedGenerationDTO The generation information
 */
@GET
@Path("/generation/{generationId}")
@Produces(MediaType.APPLICATION_JSON)
public ExecutionBasedGenerationDTO getGeneration(@PathParam("generationId") Long generationId) {
    final EstimationGenerationEntity generation = service.getGeneration(generationId);
    ExceptionUtils.throwNotFoundExceptionIfNull(generation, String.format(NO_GENERATION_MESSAGE, generationId));
    final ExecutionBasedGenerationDTO dto = conversionService.convert(generation, ExecutionBasedGenerationDTO.class);
    // Mask source-sensitive details before returning
    return sensitiveInfoService.filterSensitiveInfo(dto,
            Collections.singletonMap(Constants.Variables.SOURCE, generation.getSource()));
}
/**
 * Streams the result of an estimation design generation as a ZIP archive.
 *
 * @summary Get estimation design generation result
 * @param generationId The id for the estimation generation
 * @return Response Streams a binary ZIP file with the results
 * @throws IOException when the result archive cannot be retrieved
 */
@GET
@Path("/generation/{generationId}/result")
@Produces("application/zip")
public Response downloadResults(@PathParam("generationId") Long generationId) throws IOException {
    final File archive = executionService.getExecutionResult(generationId);
    final String contentDisposition = "attachment; filename=\"" + archive.getName() + "\"";
    return Response.ok(archive)
            .header("Content-type", "application/zip")
            .header("Content-Disposition", contentDisposition)
            .build();
}
/**
 * Re-reads the estimation by id so entity graphs are applied, then converts
 * it to a DTO.
 *
 * @param id the estimation id
 * @return the refreshed estimation as an {@link EstimationDTO}
 */
private EstimationDTO reloadAndConvert(Integer id) {
    final Estimation refreshed = service.getById(id);
    return conversionService.convert(refreshed, EstimationDTO.class);
}
/**
 * Performs a series of checks of the estimation design to ensure it will
 * properly execute.
 *
 * @summary Check an estimation design for logic flaws
 * @param estimationDTO The estimation design
 * @return CheckResult The results of performing all checks
 */
@POST
@Path("/check")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public CheckResult runDiagnostics(EstimationDTO estimationDTO) {
    final List<Warning> warnings = checker.check(estimationDTO);
    return new CheckResult(warnings);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/Estimation.java | src/main/java/org/ohdsi/webapi/estimation/Estimation.java | package org.ohdsi.webapi.estimation;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Lob;
import javax.persistence.Table;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import org.hibernate.annotations.Type;
import org.ohdsi.analysis.estimation.design.EstimationTypeEnum;
import org.ohdsi.webapi.model.CommonEntity;
@Entity(name = "Estimation")
@Table(name="estimation")
public class Estimation extends CommonEntity<Integer> {

    // Primary key, generated from the estimation_seq database sequence.
    @Id
    @GenericGenerator(
            name = "estimation_generator",
            strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
            parameters = {
                @Parameter(name = "sequence_name", value = "estimation_seq"),
                @Parameter(name = "increment_size", value = "1")
            }
    )
    @GeneratedValue(generator = "estimation_generator")
    @Column(name = "estimation_id")
    private Integer id;

    // Display name of the estimation design.
    @Column(name = "name")
    private String name;

    // Estimation type, persisted as the string form of EstimationTypeEnum.
    @Column(name = "type")
    private String type;

    // Free-text description of the design.
    @Column(name = "description")
    private String description;

    // Serialized analysis specification (large text column).
    @Lob
    @Type(type = "org.hibernate.type.TextType")
    private String specification;

    /** Returns the primary key of this estimation. */
    @Override
    public Integer getId() {
        return this.id;
    }

    /** Sets the primary key of this estimation. */
    public void setId(Integer id) {
        this.id = id;
    }

    /** Returns the display name. */
    public String getName() {
        return this.name;
    }

    /** Sets the display name. */
    public void setName(String name) {
        this.name = name;
    }

    /** Returns the estimation type, converted from its persisted string form. */
    public EstimationTypeEnum getType() {
        return EstimationTypeEnum.fromValue(this.type);
    }

    /** Sets the estimation type; stored as the enum's string representation. */
    public void setType(EstimationTypeEnum type) {
        this.type = type.toString();
    }

    /** Returns the free-text description. */
    public String getDescription() {
        return this.description;
    }

    /** Sets the free-text description. */
    public void setDescription(String description) {
        this.description = description;
    }

    /** Returns the serialized analysis specification. */
    public String getSpecification() {
        return this.specification;
    }

    /** Sets the serialized analysis specification. */
    public void setSpecification(String specification) {
        this.specification = specification;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/EstimationService.java | src/main/java/org/ohdsi/webapi/estimation/EstimationService.java | package org.ohdsi.webapi.estimation;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.ohdsi.webapi.estimation.domain.EstimationGenerationEntity;
import org.ohdsi.webapi.estimation.specification.EstimationAnalysisImpl;
import org.ohdsi.webapi.job.JobExecutionResource;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
/**
 * Service contract for creating, copying, importing/exporting, and generating
 * population-level effect estimation designs.
 */
public interface EstimationService {

    /** Returns all estimation designs. */
    Iterable<Estimation> getAnalysisList();

    /** Deletes the estimation design with the given id. */
    void delete(final int id);

    /** Persists a new estimation design and returns the saved entity. */
    Estimation createEstimation(Estimation estimation) throws Exception;

    /** Updates the estimation design identified by {@code id} and returns the saved entity. */
    Estimation updateEstimation(final int id, Estimation est) throws Exception;

    /** Creates a copy of the estimation design identified by {@code id}. */
    Estimation copy(final int id) throws Exception;

    /** Returns the estimation design with the given id. */
    Estimation getAnalysis(int id);

    /** Returns the deserialized analysis specification of the design with the given id. */
    EstimationAnalysisImpl getAnalysisExpression(int id);

    /** Exports the design, resolving cohorts/concept sets against the given source. */
    EstimationAnalysisImpl exportAnalysis(Estimation est, String sourceKey);

    /** Exports the design, resolving against the priority vocabulary source. */
    EstimationAnalysisImpl exportAnalysis(Estimation est);

    /** Imports a full design, creating any referenced cohorts and concept sets. */
    Estimation importAnalysis(EstimationAnalysisImpl est) throws Exception;

    /** Returns a unique name to use when copying a design named {@code dtoName}. */
    String getNameForCopy(String dtoName);

    /** Writes a hydrated R study package for the analysis to {@code out}. */
    void hydrateAnalysis(EstimationAnalysisImpl analysis, String packageName, OutputStream out) throws JsonProcessingException;

    /** Starts a generation job for the design against the given source. */
    JobExecutionResource runGeneration(Estimation estimation, String sourceKey) throws IOException;

    /** Returns the generations recorded for the given design id. */
    List<EstimationGenerationEntity> getEstimationGenerations(Integer estimationAnalysisId);

    /** Returns a single generation record by id. */
    EstimationGenerationEntity getGeneration(Long generationId);

    /** Returns the design by id (with audit info fetched). */
    Estimation getById(Integer id);

    /** Returns how many other designs share {@code name} (excluding id). */
    int getCountEstimationWithSameName(Integer id, String name);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/EstimationServiceImpl.java | src/main/java/org/ohdsi/webapi/estimation/EstimationServiceImpl.java | package org.ohdsi.webapi.estimation;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraph;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraphUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.analysis.Utils;
import org.ohdsi.analysis.estimation.design.EstimationTypeEnum;
import org.ohdsi.analysis.estimation.design.NegativeControlTypeEnum;
import org.ohdsi.analysis.estimation.design.Settings;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.circe.vocabulary.ConceptSetExpression;
import org.ohdsi.circe.vocabulary.ConceptSetExpression.ConceptSetItem;
import org.ohdsi.webapi.analysis.AnalysisCohortDefinition;
import org.ohdsi.webapi.analysis.AnalysisConceptSet;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository;
import org.ohdsi.webapi.common.DesignImportService;
import org.ohdsi.webapi.common.generation.AnalysisExecutionSupport;
import org.ohdsi.webapi.common.generation.GenerationUtils;
import org.ohdsi.webapi.conceptset.ConceptSetCrossReferenceImpl;
import org.ohdsi.webapi.estimation.comparativecohortanalysis.specification.CohortMethodAnalysisImpl;
import org.ohdsi.webapi.estimation.comparativecohortanalysis.specification.ComparativeCohortAnalysisImpl;
import org.ohdsi.webapi.estimation.comparativecohortanalysis.specification.TargetComparatorOutcomesImpl;
import org.ohdsi.webapi.estimation.domain.EstimationGenerationEntity;
import org.ohdsi.webapi.estimation.repository.EstimationAnalysisGenerationRepository;
import org.ohdsi.webapi.estimation.repository.EstimationRepository;
import org.ohdsi.webapi.estimation.specification.EstimationAnalysisImpl;
import org.ohdsi.webapi.estimation.specification.NegativeControlImpl;
import org.ohdsi.webapi.executionengine.entity.AnalysisFile;
import org.ohdsi.webapi.featureextraction.specification.CovariateSettingsImpl;
import org.ohdsi.webapi.job.GeneratesNotification;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.service.ConceptSetService;
import org.ohdsi.webapi.service.JobService;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.service.VocabularyService;
import org.ohdsi.webapi.service.dto.ConceptSetDTO;
import org.ohdsi.webapi.shiro.annotations.DataSourceAccess;
import org.ohdsi.webapi.shiro.annotations.SourceKey;
import org.ohdsi.webapi.shiro.management.datasource.SourceAccessor;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.util.ExportUtil;
import org.ohdsi.webapi.util.EntityUtils;
import org.ohdsi.webapi.util.NameUtils;
import org.ohdsi.webapi.util.SessionUtils;
import org.ohdsi.webapi.util.TempFileUtils;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.transaction.Transactional;
import javax.ws.rs.InternalServerErrorException;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.ohdsi.webapi.Constants.GENERATE_ESTIMATION_ANALYSIS;
import static org.ohdsi.webapi.Constants.Params.ESTIMATION_ANALYSIS_ID;
import static org.ohdsi.webapi.Constants.Params.JOB_NAME;
/**
 * Default {@link EstimationService} implementation. Handles CRUD for estimation
 * designs, resolves concept-set cross references on export, rewrites ids on
 * import, and launches generation jobs via the execution engine.
 */
@Service
@Transactional
public class EstimationServiceImpl extends AnalysisExecutionSupport implements EstimationService, GeneratesNotification {

    // Dotted property paths used by ConceptSetCrossReference entries to identify
    // the location in the analysis specification that a resolved concept set
    // must be injected into on export.
    private static final String CONCEPT_SET_XREF_KEY_TARGET_COMPARATOR_OUTCOME = "estimationAnalysisSettings.analysisSpecification.targetComparatorOutcomes";
    public static final String CONCEPT_SET_XREF_KEY_NEGATIVE_CONTROL_OUTCOMES = "negativeControlOutcomes";
    private static final String CONCEPT_SET_XREF_KEY_COHORT_METHOD_COVAR = "estimationAnalysisSettings.analysisSpecification.cohortMethodAnalysisList.getDbCohortMethodDataArgs.covariateSettings";
    private static final String CONCEPT_SET_XREF_KEY_POS_CONTROL_COVAR = "positiveControlSynthesisArgs.covariateSettings";
    // Property names distinguishing included vs excluded covariate concept id lists.
    private static final String CONCEPT_SET_XREF_KEY_INCLUDED_COVARIATE_CONCEPT_IDS = "includedCovariateConceptIds";
    private static final String CONCEPT_SET_XREF_KEY_EXCLUDED_COVARIATE_CONCEPT_IDS = "excludedCovariateConceptIds";

    // R script bundled into the generated study package to drive execution.
    private final String EXEC_SCRIPT = ResourceHelper.GetResourceAsString("/resources/estimation/r/runAnalysis.R");

    // Entity graph for generation entities: eagerly fetches source and result files.
    private final EntityGraph DEFAULT_ENTITY_GRAPH = EntityGraphUtils.fromAttributePaths("source", "analysisExecution.resultFiles");
    // Entity graph for estimations: eagerly fetches audit info.
    private final EntityGraph COMMONS_ENTITY_GRAPH = EntityUtils.fromAttributePaths(
            "createdBy",
            "modifiedBy"
    );

    @PersistenceContext
    protected EntityManager entityManager;
    @Autowired
    private CohortDefinitionRepository cohortDefinitionRepository;
    @Autowired
    private ConceptSetService conceptSetService;
    @Autowired
    private VocabularyService vocabularyService;
    @Autowired
    private EstimationRepository estimationRepository;
    @Autowired
    private Environment env;
    @Autowired
    private GenerationUtils generationUtils;
    @Autowired
    private SourceService sourceService;
    @Autowired
    private JobService jobService;
    @Autowired
    private EstimationAnalysisGenerationRepository generationRepository;
    @Autowired
    private SourceAccessor sourceAccessor;
    @Autowired
    private DesignImportService designImportService;
    @Autowired
    private ConversionService conversionService;

    // Stamped into exported designs as the originating organization.
    @Value("${organization.name}")
    private String organizationName;

    /** Returns all estimation designs with audit info fetched. */
    @Override
    public Iterable<Estimation> getAnalysisList() {
        return estimationRepository.findAll(COMMONS_ENTITY_GRAPH);
    }

    /** Returns how many other designs (id differs) share the given name. */
    @Override
    public int getCountEstimationWithSameName(Integer id, String name) {
        return estimationRepository.getCountEstimationWithSameName(id, name);
    }

    /** Returns the design by id with audit info fetched. */
    @Override
    public Estimation getById(Integer id) {
        return estimationRepository.findOne(id, COMMONS_ENTITY_GRAPH);
    }

    /** Deletes the design with the given id. */
    @Override
    public void delete(final int id) {
        this.estimationRepository.delete(id);
    }

    /**
     * Persists a new design, stamping created-by/created-date and clearing any
     * modification metadata carried in from the caller.
     */
    @Override
    public Estimation createEstimation(Estimation est) throws Exception {
        Date currentTime = Calendar.getInstance().getTime();
        est.setCreatedBy(getCurrentUser());
        est.setCreatedDate(currentTime);
        // Modification metadata must be reset for a freshly created entity
        est.setModifiedBy(null);
        est.setModifiedDate(null);
        est.setName(StringUtils.trim(est.getName()));
        return save(est);
    }

    /**
     * Updates an existing design, stamping modified-by/modified-date while
     * preserving the original creation metadata from the database copy.
     */
    @Override
    public Estimation updateEstimation(final int id, Estimation est) throws Exception {
        Estimation estFromDB = getById(id);
        Date currentTime = Calendar.getInstance().getTime();
        est.setModifiedBy(getCurrentUser());
        est.setModifiedDate(currentTime);
        // Prevent any updates to protected fields like created/createdBy
        est.setCreatedDate(estFromDB.getCreatedDate());
        est.setCreatedBy(estFromDB.getCreatedBy());
        est.setName(StringUtils.trim(est.getName()));
        return save(est);
    }

    // Returns existing design names matching the given LIKE pattern; used to
    // compute unique names for copies and imports.
    private List<String> getNamesLike(String name) {
        return estimationRepository.findAllByNameStartsWith(name).stream().map(Estimation::getName).collect(Collectors.toList());
    }

    /** Copies the design with the given id under a generated unique name. */
    @Override
    public Estimation copy(final int id) throws Exception {
        Estimation est = estimationRepository.findOne(id);
        entityManager.detach(est); // Detach from the persistence context in order to save a copy
        est.setId(null);
        est.setName(getNameForCopy(est.getName()));
        return this.createEstimation(est);
    }

    /** Returns the design by id with audit info fetched. */
    @Override
    public Estimation getAnalysis(int id) {
        return estimationRepository.findOne(id, COMMONS_ENTITY_GRAPH);
    }

    /** Deserializes and returns the stored specification for the given design id. */
    @Override
    public EstimationAnalysisImpl getAnalysisExpression(int id) {
        return Utils.deserialize(estimationRepository.findOne(id, COMMONS_ENTITY_GRAPH).getSpecification(), EstimationAnalysisImpl.class);
    }

    /** Exports the design, resolving against the priority vocabulary source. */
    @Override
    public EstimationAnalysisImpl exportAnalysis(Estimation est) {
        return exportAnalysis(est, sourceService.getPriorityVocabularySource().getSourceKey());
    }

    /**
     * Exports a design by deserializing its specification and resolving every
     * referenced cohort, concept set, and concept-set cross reference against
     * the given source, producing a fully self-contained design.
     */
    @Override
    public EstimationAnalysisImpl exportAnalysis(Estimation est, String sourceKey) {
        EstimationAnalysisImpl expression;
        try {
            expression = Utils.deserialize(est.getSpecification(), EstimationAnalysisImpl.class);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // Set the root properties
        expression.setId(est.getId());
        expression.setName(StringUtils.trim(est.getName()));
        expression.setDescription(est.getDescription());
        expression.setOrganizationName(this.organizationName);
        // Retrieve the cohort definition details
        List<AnalysisCohortDefinition> detailedList = new ArrayList<>();
        for (AnalysisCohortDefinition c : expression.getCohortDefinitions()) {
            CohortDefinition cd = cohortDefinitionRepository.findOneWithDetail(c.getId());
            detailedList.add(new AnalysisCohortDefinition(cd));
        }
        expression.setCohortDefinitions(detailedList);
        // Retrieve the concept set expressions, and for each set also resolve
        // its member concept ids against the source vocabulary
        List<AnalysisConceptSet> ecsList = new ArrayList<>();
        Map<Integer, List<Long>> conceptIdentifiers = new HashMap<>();
        Map<Integer, ConceptSetExpression> csExpressionList = new HashMap<>();
        for (AnalysisConceptSet pcs : expression.getConceptSets()) {
            pcs.expression = conceptSetService.getConceptSetExpression(pcs.id, sourceKey);
            csExpressionList.put(pcs.id, pcs.expression);
            ecsList.add(pcs);
            conceptIdentifiers.put(pcs.id, new ArrayList<>(vocabularyService.resolveConceptSetExpression(pcs.expression)));
        }
        expression.setConceptSets(ecsList);
        // Resolve all ConceptSetCrossReferences: each xref names a target location
        // in the specification and the concept set whose resolved ids fill it
        for (ConceptSetCrossReferenceImpl xref : expression.getConceptSetCrossReference()) {
            // TODO: Make this conditional on the expression.getEstimationAnalysisSettings().getEstimationType() vs
            // hard coded to always use a comparative cohort analysis once we have implemented the other
            // estimation types
            Settings settings = expression.getEstimationAnalysisSettings().getAnalysisSpecification();
            ComparativeCohortAnalysisImpl ccaSpec = (ComparativeCohortAnalysisImpl) settings;
            List<TargetComparatorOutcomesImpl> tcoList = ccaSpec.getTargetComparatorOutcomes();
            List<CohortMethodAnalysisImpl> ccaList = ccaSpec.getCohortMethodAnalysisList();
            if (xref.getTargetName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_TARGET_COMPARATOR_OUTCOME)) {
                // Included/excluded covariate concept ids on a T/C/O entry
                TargetComparatorOutcomesImpl tco = tcoList.get(xref.getTargetIndex());
                List<Long> conceptIds = conceptIdentifiers.get(xref.getConceptSetId());
                if (xref.getPropertyName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_INCLUDED_COVARIATE_CONCEPT_IDS)) {
                    tco.setIncludedCovariateConceptIds(conceptIds);
                } else if (xref.getPropertyName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_EXCLUDED_COVARIATE_CONCEPT_IDS)) {
                    tco.setExcludedCovariateConceptIds(conceptIds);
                }
            } else if (xref.getTargetName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_NEGATIVE_CONTROL_OUTCOMES)) {
                // Fill in the negative controls for each T/C pair as specified
                TargetComparatorOutcomesImpl tco = tcoList.get(xref.getTargetIndex());
                ConceptSetExpression e = csExpressionList.get(xref.getConceptSetId());
                for(ConceptSetItem csi : e.items) {
                    NegativeControlImpl nc = new NegativeControlImpl();
                    nc.setTargetId(tco.getTargetId());
                    nc.setComparatorId(tco.getComparatorId());
                    nc.setOutcomeId(csi.concept.conceptId);
                    nc.setOutcomeName(csi.concept.conceptName);
                    nc.setType(NegativeControlTypeEnum.OUTCOME);
                    expression.addNegativeControlsItem(nc);
                }
            } else if (xref.getTargetName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_COHORT_METHOD_COVAR)) {
                // Included/excluded covariate concept ids on a cohort-method analysis
                CohortMethodAnalysisImpl cca = ccaList.get(xref.getTargetIndex());
                CovariateSettingsImpl dbCohortMethodCovarSettings = cca.getDbCohortMethodDataArgs().getCovariateSettings();
                List<Long> conceptIds = conceptIdentifiers.get(xref.getConceptSetId());
                if (xref.getPropertyName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_INCLUDED_COVARIATE_CONCEPT_IDS)) {
                    dbCohortMethodCovarSettings.setIncludedCovariateConceptIds(conceptIds);
                } else if (xref.getPropertyName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_EXCLUDED_COVARIATE_CONCEPT_IDS)) {
                    dbCohortMethodCovarSettings.setExcludedCovariateConceptIds(conceptIds);
                }
            } else if (xref.getTargetName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_POS_CONTROL_COVAR)) {
                // Included/excluded covariate concept ids on positive control synthesis
                if (xref.getPropertyName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_INCLUDED_COVARIATE_CONCEPT_IDS)) {
                    expression.getPositiveControlSynthesisArgs().getCovariateSettings().setIncludedCovariateConceptIds(conceptIdentifiers.get(xref.getConceptSetId()));
                } else if (xref.getPropertyName().equalsIgnoreCase(CONCEPT_SET_XREF_KEY_EXCLUDED_COVARIATE_CONCEPT_IDS)) {
                    expression.getPositiveControlSynthesisArgs().getCovariateSettings().setExcludedCovariateConceptIds(conceptIdentifiers.get(xref.getConceptSetId()));
                }
            }
        }
        // Strip audit metadata from the exported object
        ExportUtil.clearCreateAndUpdateInfo(expression);
        return expression;
    }

    /**
     * Imports a full design: persists (or reuses) its cohorts and concept sets,
     * rewrites all old ids in the specification to the newly assigned ones,
     * clears derived data (covariate concept ids, negative controls), and saves
     * a new Estimation under a unique name. Only comparative cohort analyses
     * are supported.
     */
    @Override
    public Estimation importAnalysis(EstimationAnalysisImpl analysis) throws Exception {
        try {
            if (Objects.isNull(analysis.getEstimationAnalysisSettings())) {
                log.error("Failed to import Estimation. Invalid source JSON. EstimationAnalysisSettings is empty");
                throw new InternalServerErrorException();
            }
            if (analysis.getEstimationAnalysisSettings().getEstimationType() != EstimationTypeEnum.COMPARATIVE_COHORT_ANALYSIS) {
                String estimationType = analysis.getEstimationAnalysisSettings().getEstimationType().name();
                throw new UnsupportedOperationException("Cannot import " + estimationType);
            }
            // Open up the analysis and get the relevant portions for import
            Settings settings = analysis.getEstimationAnalysisSettings().getAnalysisSpecification();
            ComparativeCohortAnalysisImpl ccaSpec = (ComparativeCohortAnalysisImpl) settings;
            List<TargetComparatorOutcomesImpl> tcoList = ccaSpec.getTargetComparatorOutcomes();
            List<CohortMethodAnalysisImpl> ccaList = ccaSpec.getCohortMethodAnalysisList();
            // Create all of the cohort definitions
            // and map the IDs from old -> new
            Map<Long, Long> cohortIds = new HashMap<>();
            analysis.getCohortDefinitions().forEach((analysisCohortDefinition) -> {
                Integer oldId = analysisCohortDefinition.getId();
                analysisCohortDefinition.setId(null);
                CohortDefinition cd = designImportService.persistCohortOrGetExisting(conversionService.convert(analysisCohortDefinition, CohortDefinition.class), true);
                cohortIds.put(Long.valueOf(oldId), Long.valueOf(cd.getId()));
                analysisCohortDefinition.setId(cd.getId());
                analysisCohortDefinition.setName(cd.getName());
                log.debug("cohort created: " + cd.getId());
            });
            // Create all of the concept sets and map
            // the IDs from old -> new
            Map<Integer, Integer> conceptSetIdMap = new HashMap<>();
            analysis.getConceptSets().forEach((pcs) -> {
                int oldId = pcs.id;
                ConceptSetDTO cs = designImportService.persistConceptSet(pcs);
                pcs.id = cs.getId();
                pcs.name = cs.getName();
                conceptSetIdMap.put(oldId, cs.getId());
                log.debug("concept set created: " + cs.getId());
            });
            // Replace all of the T/C/Os with the new IDs
            tcoList.forEach((tco) -> {
                // Get the new IDs
                Long newT = cohortIds.get(tco.getTargetId());
                Long newC = cohortIds.get(tco.getComparatorId());
                List<Long> newOs = new ArrayList<>();
                tco.getOutcomeIds().forEach((o) -> {
                    newOs.add(cohortIds.get(o));
                });
                // Set the TCO to use the new IDs
                tco.setTargetId(newT);
                tco.setComparatorId(newC);
                tco.setOutcomeIds(newOs);
                // Clear any included/excluded covarite concept ids
                tco.setExcludedCovariateConceptIds(new ArrayList<>());
                tco.setIncludedCovariateConceptIds(new ArrayList<>());
            });
            // Replace all of the concept sets
            analysis.getConceptSetCrossReference().forEach((ConceptSetCrossReferenceImpl xref) -> {
                Integer newConceptSetId = conceptSetIdMap.get(xref.getConceptSetId());
                xref.setConceptSetId(newConceptSetId);
            });
            // Clear all of the concept IDs from the covariate settings;
            // these are re-resolved from the concept sets on export
            ccaList.forEach((cca) -> {
                cca.getDbCohortMethodDataArgs().getCovariateSettings().setIncludedCovariateConceptIds(new ArrayList<>());
                cca.getDbCohortMethodDataArgs().getCovariateSettings().setExcludedCovariateConceptIds(new ArrayList<>());
            });
            analysis.getPositiveControlSynthesisArgs().getCovariateSettings().setIncludedCovariateConceptIds(new ArrayList<>());
            analysis.getPositiveControlSynthesisArgs().getCovariateSettings().setExcludedCovariateConceptIds(new ArrayList<>());
            // Remove all of the negative controls as
            // these are populated upon export
            analysis.setNegativeControls(new ArrayList<>());
            // Remove the ID
            analysis.setId(null);
            // Create the estimation
            Estimation est = new Estimation();
            est.setDescription(analysis.getDescription());
            est.setType(EstimationTypeEnum.COMPARATIVE_COHORT_ANALYSIS);
            est.setSpecification(Utils.serialize(analysis));
            est.setName(NameUtils.getNameWithSuffix(analysis.getName(), this::getNamesLike));
            Estimation savedEstimation = this.createEstimation(est);
            return estimationRepository.findOne(savedEstimation.getId(), COMMONS_ENTITY_GRAPH);
        } catch (Exception e) {
            log.debug("Error while importing estimation analysis: " + e.getMessage());
            throw e;
        }
    }

    /** Returns a unique "copy of" style name derived from the given name. */
    @Override
    public String getNameForCopy(String dtoName) {
        return NameUtils.getNameForCopy(dtoName, this::getNamesLike, estimationRepository.findByName(dtoName));
    }

    /**
     * Writes a hydrated R study package for the analysis to {@code out}.
     *
     * @throws IllegalArgumentException when the package name is null or not alphanumeric
     */
    @Override
    public void hydrateAnalysis(EstimationAnalysisImpl analysis, String packageName, OutputStream out) throws JsonProcessingException {
        if (packageName == null || !Utils.isAlphaNumeric(packageName)) {
            throw new IllegalArgumentException("The package name must be alphanumeric only.");
        }
        analysis.setPackageName(packageName);
        super.hydrateAnalysis(analysis, out);
    }

    /**
     * Builds the study package for the design against the given source and
     * submits it as an execution-engine batch job, returning the job handle.
     */
    @Override
    @DataSourceAccess
    public JobExecutionResource runGeneration(Estimation estimation, @SourceKey String sourceKey) throws IOException {
        final Source source = sourceService.findBySourceKey(sourceKey);
        final Integer analysisId = estimation.getId();
        // Session-scoped package name keeps concurrent generations distinct
        String packageName = String.format("EstimationAnalysis.%s", SessionUtils.sessionId());
        String packageFilename = String.format("estimation_study_%d.zip", analysisId);
        List<AnalysisFile> analysisFiles = new ArrayList<>();
        AnalysisFile analysisFile = new AnalysisFile();
        analysisFile.setFileName(packageFilename);
        // Hydrate the full study package into memory and attach it to the job
        try(ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            EstimationAnalysisImpl analysis = exportAnalysis(estimation, sourceKey);
            hydrateAnalysis(analysis, packageName, out);
            analysisFile.setContents(out.toByteArray());
        }
        analysisFiles.add(analysisFile);
        analysisFiles.add(prepareAnalysisExecution(packageName, packageFilename, analysisId));
        JobParametersBuilder builder = prepareJobParametersBuilder(source, analysisId, packageName, packageFilename)
                .addString(ESTIMATION_ANALYSIS_ID, analysisId.toString())
                .addString(JOB_NAME, String.format("Generating Estimation Analysis %d using %s (%s)", analysisId, source.getSourceName(), source.getSourceKey()));
        Job generateAnalysisJob = generationUtils.buildJobForExecutionEngineBasedAnalysisTasklet(
                GENERATE_ESTIMATION_ANALYSIS,
                source,
                builder,
                analysisFiles
        ).build();
        return jobService.runJob(generateAnalysisJob, builder.toJobParameters());
    }

    /** Returns the R driver script bundled into generated packages. */
    @Override
    protected String getExecutionScript() {
        return EXEC_SCRIPT;
    }

    /** Returns generations for the design, filtered to sources the caller can access. */
    @Override
    public List<EstimationGenerationEntity> getEstimationGenerations(Integer estimationAnalysisId) {
        return generationRepository
                .findByEstimationAnalysisId(estimationAnalysisId, DEFAULT_ENTITY_GRAPH)
                .stream()
                .filter(gen -> sourceAccessor.hasAccess(gen.getSource()))
                .collect(Collectors.toList());
    }

    /** Returns a single generation record with source and result files fetched. */
    @Override
    public EstimationGenerationEntity getGeneration(Long generationId) {
        return generationRepository.findOne(generationId, DEFAULT_ENTITY_GRAPH);
    }

    // Saves, flushes, refreshes, and re-reads the entity so the returned
    // instance reflects database state with the commons entity graph applied.
    private Estimation save(Estimation analysis) {
        analysis = estimationRepository.saveAndFlush(analysis);
        entityManager.refresh(analysis);
        analysis = getById(analysis.getId());
        return analysis;
    }

    /** Job name used for generation notifications. */
    @Override
    public String getJobName() {
        return GENERATE_ESTIMATION_ANALYSIS;
    }

    /** Job parameter key used to fold executions per analysis. */
    @Override
    public String getExecutionFoldingKey() {
        return ESTIMATION_ANALYSIS_ID;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/dto/EstimationShortDTO.java | src/main/java/org/ohdsi/webapi/estimation/dto/EstimationShortDTO.java | package org.ohdsi.webapi.estimation.dto;
import org.ohdsi.analysis.estimation.design.EstimationTypeEnum;
import org.ohdsi.webapi.common.analyses.CommonAnalysisDTO;
/**
 * Lightweight DTO for an estimation design (no specification payload).
 */
public class EstimationShortDTO extends CommonAnalysisDTO {

    // Defaults to comparative cohort analysis.
    private EstimationTypeEnum type = EstimationTypeEnum.COMPARATIVE_COHORT_ANALYSIS;

    /** Returns the estimation type. */
    public EstimationTypeEnum getType() {
        return this.type;
    }

    /** Sets the estimation type. */
    public void setType(EstimationTypeEnum type) {
        this.type = type;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/dto/EstimationDTO.java | src/main/java/org/ohdsi/webapi/estimation/dto/EstimationDTO.java | package org.ohdsi.webapi.estimation.dto;
/**
 * Full estimation DTO, extending the short form with the serialized
 * analysis specification.
 */
public class EstimationDTO extends EstimationShortDTO {

    // Serialized analysis specification.
    private String specification;

    /** Returns the serialized analysis specification. */
    public String getSpecification() {
        return this.specification;
    }

    /** Sets the serialized analysis specification. */
    public void setSpecification(String specification) {
        this.specification = specification;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/domain/EstimationGenerationEntity.java | src/main/java/org/ohdsi/webapi/estimation/domain/EstimationGenerationEntity.java | package org.ohdsi.webapi.estimation.domain;
import org.ohdsi.webapi.estimation.Estimation;
import org.ohdsi.webapi.executionengine.entity.ExecutionEngineGenerationEntity;
import javax.persistence.*;
/**
 * Persistent record of a single estimation generation run, linking an
 * execution-engine generation back to its estimation design.
 */
@Entity
@Table(name = "estimation_analysis_generation")
public class EstimationGenerationEntity extends ExecutionEngineGenerationEntity {

    // Owning estimation design; fetched lazily.
    @ManyToOne(targetEntity = Estimation.class, fetch = FetchType.LAZY)
    @JoinColumn(name = "estimation_id")
    private Estimation estimationAnalysis;

    /** Returns the estimation design this generation belongs to. */
    public Estimation getEstimationAnalysis() {
        return this.estimationAnalysis;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/repository/EstimationAnalysisGenerationRepository.java | src/main/java/org/ohdsi/webapi/estimation/repository/EstimationAnalysisGenerationRepository.java | package org.ohdsi.webapi.estimation.repository;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraph;
import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphJpaRepository;
import org.ohdsi.webapi.estimation.domain.EstimationGenerationEntity;
import java.util.List;
/**
 * Spring Data repository for {@code EstimationGenerationEntity} rows —
 * execution-engine generation runs recorded for estimation analyses.
 */
public interface EstimationAnalysisGenerationRepository extends EntityGraphJpaRepository<EstimationGenerationEntity, Long> {
/**
 * Lists all generation runs recorded for one estimation analysis.
 *
 * @param estimationAnalysisId id of the parent estimation analysis
 * @param entityGraph          entity graph controlling which associations are fetched with each row
 * @return the matching generation entities (empty list when none exist)
 */
List<EstimationGenerationEntity> findByEstimationAnalysisId(Integer estimationAnalysisId, EntityGraph entityGraph);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/repository/EstimationRepository.java | src/main/java/org/ohdsi/webapi/estimation/repository/EstimationRepository.java | package org.ohdsi.webapi.estimation.repository;
import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphJpaRepository;
import org.ohdsi.webapi.estimation.Estimation;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
import java.util.Optional;
/**
 * Spring Data repository for {@code Estimation} analysis definitions.
 */
public interface EstimationRepository extends EntityGraphJpaRepository<Estimation, Integer> {
/**
 * Finds estimations whose name matches the given SQL LIKE pattern.
 * The query declares ESCAPE '\', so the caller presumably pre-escapes
 * literal '%'/'_' characters in the pattern — confirm against call sites.
 *
 * @param pattern SQL LIKE pattern (e.g. "My analysis%")
 * @return all estimations whose name matches the pattern
 */
@Query("SELECT es FROM Estimation es WHERE es.name LIKE ?1 ESCAPE '\\'")
List<Estimation> findAllByNameStartsWith(String pattern);
/**
 * Looks up a single estimation by its exact name.
 *
 * @param name exact name to match
 * @return the estimation, or empty when no row has that name
 */
Optional<Estimation> findByName(String name);
/**
 * Counts estimations other than the given one that already use the given
 * name (the row with {@code id} itself is excluded from the count) —
 * presumably used for uniqueness checks before saving.
 *
 * @param id   id of the estimation being saved/updated
 * @param name candidate name
 * @return number of other estimations already using {@code name}
 */
@Query("SELECT COUNT(es) FROM Estimation es WHERE es.name = :name and es.id <> :id")
int getCountEstimationWithSameName(@Param("id") Integer id, @Param("name") String name);
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/converter/EstimationToEstimationDTOConverter.java | src/main/java/org/ohdsi/webapi/estimation/converter/EstimationToEstimationDTOConverter.java | package org.ohdsi.webapi.estimation.converter;
import org.ohdsi.webapi.estimation.Estimation;
import org.ohdsi.webapi.estimation.dto.EstimationDTO;
import org.springframework.stereotype.Component;
/**
 * Converts an {@code Estimation} entity into a full {@code EstimationDTO}:
 * the short-DTO fields plus the serialized specification.
 */
@Component
public class EstimationToEstimationDTOConverter extends EstimationToEstimationShortDTOConverter<EstimationDTO> {

    /**
     * Copies the short-DTO fields via the parent converter, then the
     * specification payload.
     *
     * @param source the estimation entity to read from
     * @param target the DTO being populated
     */
    @Override
    public void doConvert(Estimation source, EstimationDTO target) {
        super.doConvert(source, target);
        target.setSpecification(source.getSpecification());
    }

    /** @return a new, empty {@code EstimationDTO} to populate */
    @Override
    protected EstimationDTO createResultObject() {
        return new EstimationDTO();
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/converter/EstimationGenerationToCommonGenerationDtoConverter.java | src/main/java/org/ohdsi/webapi/estimation/converter/EstimationGenerationToCommonGenerationDtoConverter.java | package org.ohdsi.webapi.estimation.converter;
import org.ohdsi.webapi.common.generation.ExecutionEngineGenerationEntityToDtoConverter;
import org.ohdsi.webapi.estimation.domain.EstimationGenerationEntity;
import org.springframework.stereotype.Component;
/**
 * Marker converter mapping {@code EstimationGenerationEntity} to the common
 * generation DTO. All mapping behavior is inherited from
 * {@code ExecutionEngineGenerationEntityToDtoConverter}; this subclass only
 * binds the generic type and registers the Spring bean.
 */
@Component
public class EstimationGenerationToCommonGenerationDtoConverter extends ExecutionEngineGenerationEntityToDtoConverter<EstimationGenerationEntity> {
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/converter/EstimationToEstimationShortDTOConverter.java | src/main/java/org/ohdsi/webapi/estimation/converter/EstimationToEstimationShortDTOConverter.java | package org.ohdsi.webapi.estimation.converter;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.webapi.estimation.Estimation;
import org.ohdsi.webapi.estimation.dto.EstimationShortDTO;
import org.ohdsi.webapi.service.converters.BaseCommonEntityToDTOConverter;
import org.springframework.stereotype.Component;
/**
 * Converts an {@code Estimation} entity into the short-form DTO
 * (id, trimmed name, description, type).
 *
 * @param <T> the concrete DTO type produced by this converter
 */
@Component
public class EstimationToEstimationShortDTOConverter<T extends EstimationShortDTO>
        extends BaseCommonEntityToDTOConverter<Estimation, T> {

    /**
     * Creates the DTO instance to populate.
     * <p>
     * The cast is only type-safe when {@code T} is exactly
     * {@code EstimationShortDTO}; subclasses binding {@code T} to a narrower
     * type (e.g. the full-DTO converter) must override this method to return
     * an instance of the correct type, otherwise callers get a
     * {@code ClassCastException} at use site.
     *
     * @return a new, empty result DTO
     */
    @Override
    @SuppressWarnings("unchecked") // documented contract: subclasses with narrower T must override
    protected T createResultObject() {
        return (T) new EstimationShortDTO();
    }

    /**
     * Copies the short-form fields from the entity onto the DTO.
     * The name is whitespace-trimmed on the way out.
     *
     * @param source the estimation entity to read from
     * @param target the DTO being populated
     */
    @Override
    public void doConvert(Estimation source, T target) {
        target.setId(source.getId());
        target.setName(StringUtils.trim(source.getName()));
        target.setDescription(source.getDescription());
        target.setType(source.getType());
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/ComparativeCohortAnalysisSettings.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/ComparativeCohortAnalysisSettings.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.analysis.estimation.design.EstimationTypeEnum;
import org.ohdsi.webapi.estimation.specification.EstimationAnalysisSettingsImpl;
/**
 * Estimation settings variant carrying a comparative cohort analysis design.
 *
 * @author asena5
 */
public class ComparativeCohortAnalysisSettings extends EstimationAnalysisSettingsImpl {

    /** The concrete analysis design; {@code null} until assigned. */
    private ComparativeCohortAnalysisImpl analysisSpecification;

    /**
     * Always reports the comparative-cohort-analysis estimation type.
     *
     * @return {@code EstimationTypeEnum.COMPARATIVE_COHORT_ANALYSIS}
     */
    @Override
    public EstimationTypeEnum getEstimationType() {
        return EstimationTypeEnum.COMPARATIVE_COHORT_ANALYSIS;
    }

    /** @return the comparative cohort analysis design, or {@code null} when unset */
    @Override
    public ComparativeCohortAnalysisImpl getAnalysisSpecification() {
        return this.analysisSpecification;
    }

    /** @param analysisSpecification the design to attach to these settings */
    public void setAnalysisSpecification(ComparativeCohortAnalysisImpl analysisSpecification) {
        this.analysisSpecification = analysisSpecification;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/TrimByPsArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/TrimByPsArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.TrimByPsArgs;
import org.ohdsi.webapi.RLangClassImpl;
/**
 * Arguments for propensity-score trimming.
 *
 * @author asena5
 */
public class TrimByPsArgsImpl extends RLangClassImpl implements TrimByPsArgs {

    private Float trimFraction = 0.05f;

    /**
     * The fraction removed from each group: subjects with the highest
     * propensity scores in the target group, and the lowest in the
     * comparator group.
     *
     * @return the trim fraction
     */
    @Override
    public Float getTrimFraction() {
        return this.trimFraction;
    }

    /** @param trimFraction the fraction to trim from each group */
    public void setTrimFraction(Float trimFraction) {
        this.trimFraction = trimFraction;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/CreateStudyPopulationArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/CreateStudyPopulationArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.webapi.RLangClassImpl;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.CreateStudyPopulationArgs;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.RemoveDuplicateSubjectsEnum;
/**
 * Arguments for constructing the study population (exposure filtering,
 * washout, duplicate handling, and the time-at-risk window).
 *
 * @author asena5
 */
public class CreateStudyPopulationArgsImpl extends RLangClassImpl implements CreateStudyPopulationArgs {

    private Boolean firstExposureOnly = false;
    private Boolean restrictToCommonPeriod = false;
    private Integer washoutPeriod = 0;
    private RemoveDuplicateSubjectsEnum removeDuplicateSubjects = RemoveDuplicateSubjectsEnum.KEEP_ALL;
    private Boolean removeSubjectsWithPriorOutcome = false;
    private Integer priorOutcomeLookback = 99999;
    private Integer minDaysAtRisk = 1;
    private Integer riskWindowStart = 0;
    private Boolean addExposureDaysToStart = false;
    private Integer riskWindowEnd = 0;
    private Boolean addExposureDaysToEnd = true;
    private Boolean censorAtNewRiskWindow;

    /** @return whether only each subject's first exposure is included */
    @Override
    public Boolean getFirstExposureOnly() {
        return this.firstExposureOnly;
    }

    /** @param firstExposureOnly include only the first exposure per subject? */
    public void setFirstExposureOnly(Boolean firstExposureOnly) {
        this.firstExposureOnly = firstExposureOnly;
    }

    /** @return whether the analysis is restricted to the period when both exposures are observed */
    @Override
    public Boolean getRestrictToCommonPeriod() {
        return this.restrictToCommonPeriod;
    }

    /** @param restrictToCommonPeriod restrict to the period when both exposures are observed? */
    public void setRestrictToCommonPeriod(Boolean restrictToCommonPeriod) {
        this.restrictToCommonPeriod = restrictToCommonPeriod;
    }

    /** @return minimum days of continuous observation required before the index date */
    @Override
    public Integer getWashoutPeriod() {
        return this.washoutPeriod;
    }

    /** @param washoutPeriod minimum prior continuous observation time, in days */
    public void setWashoutPeriod(Integer washoutPeriod) {
        this.washoutPeriod = washoutPeriod;
    }

    /** @return policy for subjects present in both the target and comparator cohorts */
    @Override
    public RemoveDuplicateSubjectsEnum getRemoveDuplicateSubjects() {
        return this.removeDuplicateSubjects;
    }

    /** @param removeDuplicateSubjects policy for subjects in both cohorts */
    public void setRemoveDuplicateSubjects(RemoveDuplicateSubjectsEnum removeDuplicateSubjects) {
        this.removeDuplicateSubjects = removeDuplicateSubjects;
    }

    /** @return whether subjects with the outcome before the risk-window start are removed */
    @Override
    public Boolean getRemoveSubjectsWithPriorOutcome() {
        return this.removeSubjectsWithPriorOutcome;
    }

    /** @param removeSubjectsWithPriorOutcome drop subjects with the outcome before the risk window? */
    public void setRemoveSubjectsWithPriorOutcome(Boolean removeSubjectsWithPriorOutcome) {
        this.removeSubjectsWithPriorOutcome = removeSubjectsWithPriorOutcome;
    }

    /** @return lookback window, in days, used when identifying prior outcomes */
    @Override
    public Integer getPriorOutcomeLookback() {
        return this.priorOutcomeLookback;
    }

    /** @param priorOutcomeLookback days to look back for prior outcomes */
    public void setPriorOutcomeLookback(Integer priorOutcomeLookback) {
        this.priorOutcomeLookback = priorOutcomeLookback;
    }

    /** @return minimum required number of days at risk */
    @Override
    public Integer getMinDaysAtRisk() {
        return this.minDaysAtRisk;
    }

    /** @param minDaysAtRisk minimum required days at risk */
    public void setMinDaysAtRisk(Integer minDaysAtRisk) {
        this.minDaysAtRisk = minDaysAtRisk;
    }

    /** @return risk-window start in days relative to the index date */
    @Override
    public Integer getRiskWindowStart() {
        return this.riskWindowStart;
    }

    /** @param riskWindowStart risk-window start offset, in days */
    public void setRiskWindowStart(Integer riskWindowStart) {
        this.riskWindowStart = riskWindowStart;
    }

    /** @return whether the exposure length is added to the risk-window start */
    @Override
    public Boolean getAddExposureDaysToStart() {
        return this.addExposureDaysToStart;
    }

    /** @param addExposureDaysToStart add the exposure length to the risk-window start? */
    public void setAddExposureDaysToStart(Boolean addExposureDaysToStart) {
        this.addExposureDaysToStart = addExposureDaysToStart;
    }

    /** @return risk-window end in days relative to the index date */
    @Override
    public Integer getRiskWindowEnd() {
        return this.riskWindowEnd;
    }

    /** @param riskWindowEnd risk-window end offset, in days */
    public void setRiskWindowEnd(Integer riskWindowEnd) {
        this.riskWindowEnd = riskWindowEnd;
    }

    /** @return whether the exposure length is added to the risk-window end */
    @Override
    public Boolean getAddExposureDaysToEnd() {
        return this.addExposureDaysToEnd;
    }

    /** @param addExposureDaysToEnd add the exposure length to the risk-window end? */
    public void setAddExposureDaysToEnd(Boolean addExposureDaysToEnd) {
        this.addExposureDaysToEnd = addExposureDaysToEnd;
    }

    /** @return whether time-at-risk is censored when a new time-at-risk starts ({@code null} = unset) */
    @Override
    public Boolean getCensorAtNewRiskWindow() {
        return this.censorAtNewRiskWindow;
    }

    /** @param censorAtNewRiskWindow censor time-at-risk when a new risk window starts? */
    public void setCensorAtNewRiskWindow(Boolean censorAtNewRiskWindow) {
        this.censorAtNewRiskWindow = censorAtNewRiskWindow;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/StratifyByPsAndCovariatesArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/StratifyByPsAndCovariatesArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.StratifyByPsAndCovariatesArgs;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.BaseSelectionEnum;
import org.ohdsi.webapi.RLangClassImpl;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.*;
/**
 * Arguments for stratifying subjects on propensity score plus selected covariates.
 *
 * @author asena5
 */
public class StratifyByPsAndCovariatesArgsImpl extends RLangClassImpl implements StratifyByPsAndCovariatesArgs {

    private Integer numberOfStrata = 5;
    private BaseSelectionEnum baseSelection = BaseSelectionEnum.ALL;
    private List<Integer> covariateIds;

    /**
     * @return number of equally-sized strata the propensity score is divided
     *         into (strata bounds hold equal numbers of target persons)
     */
    @Override
    @NotNull
    public Integer getNumberOfStrata() {
        return this.numberOfStrata;
    }

    /** @param numberOfStrata how many strata to divide the propensity score into */
    public void setNumberOfStrata(Integer numberOfStrata) {
        this.numberOfStrata = numberOfStrata;
    }

    /** @return the selection ("all", "target", or "comparator") whose subjects define the strata bounds */
    @Override
    public BaseSelectionEnum getBaseSelection() {
        return this.baseSelection;
    }

    /** @param baseSelection the selection whose subjects define the strata bounds */
    public void setBaseSelection(BaseSelectionEnum baseSelection) {
        this.baseSelection = baseSelection;
    }

    /**
     * Appends one covariate id to also stratify on, creating the backing
     * list on first use.
     *
     * @param covariateIdsItem the covariate id to append
     * @return this instance, for chaining
     */
    public StratifyByPsAndCovariatesArgsImpl addCovariateIdsItem(Integer covariateIdsItem) {
        if (covariateIds == null) {
            covariateIds = new ArrayList<>();
        }
        covariateIds.add(covariateIdsItem);
        return this;
    }

    /** @return covariate ids to also stratify on, or {@code null} when none were set */
    @Override
    public List<Integer> getCovariateIds() {
        return this.covariateIds;
    }

    /** @param covariateIds replaces the full list of covariate ids */
    public void setCovariateIds(List<Integer> covariateIds) {
        this.covariateIds = covariateIds;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/ComparativeCohortAnalysisImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/ComparativeCohortAnalysisImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import java.util.ArrayList;
import java.util.List;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.ComparativeCohortAnalysis;
/**
 * Top-level comparative cohort analysis design: the target/comparator/outcome
 * combinations to evaluate and the CohortMethod analysis settings to run.
 *
 * @author asena5
 */
public class ComparativeCohortAnalysisImpl implements ComparativeCohortAnalysis {

    private List<TargetComparatorOutcomesImpl> targetComparatorOutcomes;
    private List<CohortMethodAnalysisImpl> cohortMethodAnalysisList;

    // Lazily creates the backing list on first use, then appends the item.
    private static <E> List<E> appendItem(List<E> list, E item) {
        if (list == null) {
            list = new ArrayList<>();
        }
        list.add(item);
        return list;
    }

    /**
     * Appends one target/comparator/outcomes entry.
     *
     * @param targetComparatorOutcomesItem the entry to append
     * @return this instance, for chaining
     */
    public ComparativeCohortAnalysisImpl addTargetComparatorOutcomesItem(TargetComparatorOutcomesImpl targetComparatorOutcomesItem) {
        targetComparatorOutcomes = appendItem(targetComparatorOutcomes, targetComparatorOutcomesItem);
        return this;
    }

    /** @return the target/comparator/outcomes entries, or {@code null} when none were added */
    @Override
    public List<TargetComparatorOutcomesImpl> getTargetComparatorOutcomes() {
        return this.targetComparatorOutcomes;
    }

    /** @param targetComparatorOutcomes replaces the full list of entries */
    public void setTargetComparatorOutcomes(List<TargetComparatorOutcomesImpl> targetComparatorOutcomes) {
        this.targetComparatorOutcomes = targetComparatorOutcomes;
    }

    /**
     * Appends one CohortMethod analysis configuration.
     *
     * @param cohortMethodAnalysisListItem the configuration to append
     * @return this instance, for chaining
     */
    public ComparativeCohortAnalysisImpl addCohortMethodAnalysisListItem(CohortMethodAnalysisImpl cohortMethodAnalysisListItem) {
        cohortMethodAnalysisList = appendItem(cohortMethodAnalysisList, cohortMethodAnalysisListItem);
        return this;
    }

    /** @return the CohortMethod analysis configurations, or {@code null} when none were added */
    @Override
    public List<CohortMethodAnalysisImpl> getCohortMethodAnalysisList() {
        return this.cohortMethodAnalysisList;
    }

    /** @param cohortMethodAnalysisList replaces the full list of configurations */
    public void setCohortMethodAnalysisList(List<CohortMethodAnalysisImpl> cohortMethodAnalysisList) {
        this.cohortMethodAnalysisList = cohortMethodAnalysisList;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/MatchOnPsAndCovariatesArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/MatchOnPsAndCovariatesArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.webapi.RLangClassImpl;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.CaliperScaleEnum;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.MatchOnPsAndCovariatesArgs;
import java.util.ArrayList;
import java.util.List;
/**
 * Arguments for propensity-score matching that additionally matches on
 * specific covariates.
 *
 * @author asena5
 */
public class MatchOnPsAndCovariatesArgsImpl extends RLangClassImpl implements MatchOnPsAndCovariatesArgs {

    private Float caliper = 0.2f;
    private CaliperScaleEnum caliperScale = CaliperScaleEnum.STANDARDIZED_LOGIT;
    private Integer maxRatio = 1;
    private List<Integer> covariateIds;

    /** @return the matching caliper; observations outside it are dropped, 0 disables the caliper */
    @Override
    public Float getCaliper() {
        return this.caliper;
    }

    /** @param caliper the matching caliper (0 = no caliper) */
    public void setCaliper(Float caliper) {
        this.caliper = caliper;
    }

    /** @return the scale the caliper is expressed on ('propensity score', 'standardized', or 'standardized logit') */
    @Override
    public CaliperScaleEnum getCaliperScale() {
        return this.caliperScale;
    }

    /** @param caliperScale the scale the caliper is expressed on */
    public void setCaliperScale(CaliperScaleEnum caliperScale) {
        this.caliperScale = caliperScale;
    }

    /** @return maximum comparators matched to each target person; 0 means no maximum */
    @Override
    public Integer getMaxRatio() {
        return this.maxRatio;
    }

    /** @param maxRatio maximum comparator-to-target match ratio (0 = no maximum) */
    public void setMaxRatio(Integer maxRatio) {
        this.maxRatio = maxRatio;
    }

    /**
     * Appends one covariate id to also match on, creating the backing list
     * on first use.
     *
     * @param covariateIdsItem the covariate id to append
     * @return this instance, for chaining
     */
    public MatchOnPsAndCovariatesArgsImpl addCovariateIdsItem(Integer covariateIdsItem) {
        if (covariateIds == null) {
            covariateIds = new ArrayList<>();
        }
        covariateIds.add(covariateIdsItem);
        return this;
    }

    /** @return covariate ids to also match on, or {@code null} when none were set */
    @Override
    public List<Integer> getCovariateIds() {
        return this.covariateIds;
    }

    /** @param covariateIds replaces the full list of covariate ids */
    public void setCovariateIds(List<Integer> covariateIds) {
        this.covariateIds = covariateIds;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/MatchOnPsArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/MatchOnPsArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.webapi.RLangClassImpl;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.CaliperScaleEnum;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.MatchOnPsArgs;
import java.util.ArrayList;
import java.util.List;
/**
 * Arguments for propensity-score matching, optionally within strata defined
 * by named data columns.
 *
 * @author asena5
 */
public class MatchOnPsArgsImpl extends RLangClassImpl implements MatchOnPsArgs {

    private Float caliper = 0.2f;
    private CaliperScaleEnum caliperScale = CaliperScaleEnum.STANDARDIZED_LOGIT;
    private Integer maxRatio = 1;
    private List<String> stratificationColumns;

    /** @return the matching caliper; observations outside it are dropped, 0 disables the caliper */
    @Override
    public Float getCaliper() {
        return this.caliper;
    }

    /** @param caliper the matching caliper (0 = no caliper) */
    public void setCaliper(Float caliper) {
        this.caliper = caliper;
    }

    /** @return the scale the caliper is expressed on ('propensity score', 'standardized', or 'standardized logit') */
    @Override
    public CaliperScaleEnum getCaliperScale() {
        return this.caliperScale;
    }

    /** @param caliperScale the scale the caliper is expressed on */
    public void setCaliperScale(CaliperScaleEnum caliperScale) {
        this.caliperScale = caliperScale;
    }

    /** @return maximum comparators matched to each target person; 0 means no maximum */
    @Override
    public Integer getMaxRatio() {
        return this.maxRatio;
    }

    /** @param maxRatio maximum comparator-to-target match ratio (0 = no maximum) */
    public void setMaxRatio(Integer maxRatio) {
        this.maxRatio = maxRatio;
    }

    /**
     * Appends one stratification column name, creating the backing list on
     * first use. No persons are matched with persons outside the strata
     * identified by these columns.
     *
     * @param stratificationColumnsItem the column name or number to append
     * @return this instance, for chaining
     */
    public MatchOnPsArgsImpl addStratificationColumnsItem(String stratificationColumnsItem) {
        if (stratificationColumns == null) {
            stratificationColumns = new ArrayList<>();
        }
        stratificationColumns.add(stratificationColumnsItem);
        return this;
    }

    /** @return the stratification column names, or {@code null} when none were set */
    @Override
    public List<String> getStratificationColumns() {
        return this.stratificationColumns;
    }

    /** @param stratificationColumns replaces the full list of stratification columns */
    public void setStratificationColumns(List<String> stratificationColumns) {
        this.stratificationColumns = stratificationColumns;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/FitOutcomeModelArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/FitOutcomeModelArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.OutcomeModelTypeEnum;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.FitOutcomeModelArgs;
import org.ohdsi.webapi.RLangClassImpl;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.*;
import org.ohdsi.webapi.cyclops.specification.ControlImpl;
import org.ohdsi.webapi.cyclops.specification.PriorImpl;
/**
 * Arguments controlling how the outcome model is fit (model type,
 * stratification, covariate selection, prior and fitting control).
 *
 * @author asena5
 */
public class FitOutcomeModelArgsImpl extends RLangClassImpl implements FitOutcomeModelArgs {

    private OutcomeModelTypeEnum modelType = OutcomeModelTypeEnum.COX;
    private Boolean stratified = true;
    private Boolean useCovariates = true;
    private Boolean inversePtWeighting = false;
    private List<Integer> interactionCovariateIds;
    private List<Integer> excludeCovariateIds;
    private List<Integer> includeCovariateIds;
    private PriorImpl prior;
    private ControlImpl control;

    // Lazily creates the backing list on first use, then appends the item.
    private static List<Integer> appendItem(List<Integer> list, Integer item) {
        if (list == null) {
            list = new ArrayList<>();
        }
        list.add(item);
        return list;
    }

    /** @return the outcome model type ("logistic", "poisson", or "cox") */
    @Override
    @NotNull
    public OutcomeModelTypeEnum getModelType() {
        return this.modelType;
    }

    /** @param modelType the outcome model type to fit */
    public void setModelType(OutcomeModelTypeEnum modelType) {
        this.modelType = modelType;
    }

    /** @return whether the regression is conditioned on the population's strata (e.g. from PS matching/stratification) */
    @Override
    public Boolean getStratified() {
        return this.stratified;
    }

    /** @param stratified condition the regression on the population's strata? */
    public void setStratified(Boolean stratified) {
        this.stratified = stratified;
    }

    /** @return whether the covariate matrix is used in the outcome model */
    @Override
    public Boolean getUseCovariates() {
        return this.useCovariates;
    }

    /** @param useCovariates include the covariate matrix in the outcome model? */
    public void setUseCovariates(Boolean useCovariates) {
        this.useCovariates = useCovariates;
    }

    /** @return whether inverse probability of treatment weighting is applied */
    @Override
    public Boolean getInversePtWeighting() {
        return this.inversePtWeighting;
    }

    /** @param inversePtWeighting apply inverse probability of treatment weighting? */
    public void setInversePtWeighting(Boolean inversePtWeighting) {
        this.inversePtWeighting = inversePtWeighting;
    }

    /**
     * Appends one covariate id used to estimate interactions with the main
     * treatment effect.
     *
     * @param interactionCovariateIdsItem the covariate id to append
     * @return this instance, for chaining
     */
    public FitOutcomeModelArgsImpl addInteractionCovariateIdsItem(Integer interactionCovariateIdsItem) {
        interactionCovariateIds = appendItem(interactionCovariateIds, interactionCovariateIdsItem);
        return this;
    }

    /** @return covariate ids used for treatment-interaction terms, or {@code null} */
    @Override
    public List<Integer> getInteractionCovariateIds() {
        return this.interactionCovariateIds;
    }

    /** @param interactionCovariateIds replaces the interaction covariate id list */
    public void setInteractionCovariateIds(List<Integer> interactionCovariateIds) {
        this.interactionCovariateIds = interactionCovariateIds;
    }

    /**
     * Appends one covariate id to exclude from the outcome model.
     *
     * @param excludeCovariateIdsItem the covariate id to append
     * @return this instance, for chaining
     */
    public FitOutcomeModelArgsImpl addExcludeCovariateIdsItem(Integer excludeCovariateIdsItem) {
        excludeCovariateIds = appendItem(excludeCovariateIds, excludeCovariateIdsItem);
        return this;
    }

    /** @return covariate ids excluded from the outcome model, or {@code null} */
    @Override
    public List<Integer> getExcludeCovariateIds() {
        return this.excludeCovariateIds;
    }

    /** @param excludeCovariateIds replaces the excluded covariate id list */
    public void setExcludeCovariateIds(List<Integer> excludeCovariateIds) {
        this.excludeCovariateIds = excludeCovariateIds;
    }

    /**
     * Appends one covariate id the outcome model is restricted to.
     *
     * @param includeCovariateIdsItem the covariate id to append
     * @return this instance, for chaining
     */
    public FitOutcomeModelArgsImpl addIncludeCovariateIdsItem(Integer includeCovariateIdsItem) {
        includeCovariateIds = appendItem(includeCovariateIds, includeCovariateIdsItem);
        return this;
    }

    /** @return covariate ids the outcome model is restricted to, or {@code null} */
    @Override
    public List<Integer> getIncludeCovariateIds() {
        return this.includeCovariateIds;
    }

    /** @param includeCovariateIds replaces the included covariate id list */
    public void setIncludeCovariateIds(List<Integer> includeCovariateIds) {
        this.includeCovariateIds = includeCovariateIds;
    }

    /** @return the prior settings, or {@code null} when unset */
    @Override
    public PriorImpl getPrior() {
        return this.prior;
    }

    /** @param prior the prior settings */
    public void setPrior(PriorImpl prior) {
        this.prior = prior;
    }

    /** @return the fitting control settings, or {@code null} when unset */
    @Override
    public ControlImpl getControl() {
        return this.control;
    }

    /** @param control the fitting control settings */
    public void setControl(ControlImpl control) {
        this.control = control;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/CohortMethodAnalysisImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/CohortMethodAnalysisImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.CohortMethodAnalysis;
import org.ohdsi.webapi.estimation.specification.AnalysisImpl;
/**
* CohortMethodAnalysisImpl
*/
public class CohortMethodAnalysisImpl extends AnalysisImpl implements CohortMethodAnalysis {

    /** Selects which target to use when a drugComparatorOutcome supplies more than one. */
    private String targetType = null;
    /** Selects which comparator to use when a drugComparatorOutcome supplies more than one. */
    private String comparatorType = null;
    /** Arguments for the getDbCohortMethodData (data extraction) step. */
    private GetDbCohortMethodDataArgsImpl getDbCohortMethodDataArgs = null;
    /** Arguments for the createStudyPopulation step. */
    private CreateStudyPopulationArgsImpl createStudyPopArgs = null;
    // Each optional CohortMethod step is a Boolean toggle paired with its argument object.
    private Boolean createPs = false;
    private CreatePsArgsImpl createPsArgs = null;
    private Boolean trimByPs = false;
    private TrimByPsArgsImpl trimByPsArgs = null;
    private Boolean trimByPsToEquipoise = false;
    private TrimByPsToEquipoiseArgsImpl trimByPsToEquipoiseArgs = null;
    private Boolean matchOnPs = false;
    private MatchOnPsArgsImpl matchOnPsArgs = null;
    private Boolean matchOnPsAndCovariates = false;
    private MatchOnPsAndCovariatesArgsImpl matchOnPsAndCovariatesArgs = null;
    private Boolean stratifyByPs = false;
    private StratifyByPsArgsImpl stratifyByPsArgs = null;
    private Boolean stratifyByPsAndCovariates = false;
    private StratifyByPsAndCovariatesArgsImpl stratifyByPsAndCovariatesArgs = null;
    private Boolean fitOutcomeModel = false;
    private FitOutcomeModelArgsImpl fitOutcomeModelArgs = null;

    /**
     * Pins the R attribute class of this specification to {@code "cmAnalysis"}.
     * NOTE(review): the {@code attrClass} argument is deliberately ignored here, so callers
     * (e.g. deserialization) cannot override the fixed class name — confirm before changing.
     *
     * @param attrClass ignored; the value {@code "cmAnalysis"} is always used
     */
    @Override
    public void setAttrClass(String attrClass) {
        super.setAttrClass("cmAnalysis");
    }

    /** @return the target type used to pick a specific target in this analysis */
    @Override
    public String getTargetType() { return this.targetType; }

    /** @param targetType the target type to use in this analysis */
    public void setTargetType(String targetType) { this.targetType = targetType; }

    /** @return the comparator type used to pick a specific comparator in this analysis */
    @Override
    public String getComparatorType() { return this.comparatorType; }

    /** @param comparatorType the comparator type to use in this analysis */
    public void setComparatorType(String comparatorType) { this.comparatorType = comparatorType; }

    /** @return arguments for the data-extraction step */
    @Override
    public GetDbCohortMethodDataArgsImpl getDbCohortMethodDataArgs() { return this.getDbCohortMethodDataArgs; }

    /** @param getDbCohortMethodDataArgs arguments for the data-extraction step */
    public void setGetDbCohortMethodDataArgs(GetDbCohortMethodDataArgsImpl getDbCohortMethodDataArgs) {
        this.getDbCohortMethodDataArgs = getDbCohortMethodDataArgs;
    }

    /** @return arguments for the study-population step */
    @Override
    public CreateStudyPopulationArgsImpl getCreateStudyPopArgs() { return this.createStudyPopArgs; }

    /** @param createStudyPopArgs arguments for the study-population step */
    public void setCreateStudyPopArgs(CreateStudyPopulationArgsImpl createStudyPopArgs) {
        this.createStudyPopArgs = createStudyPopArgs;
    }

    /** @return whether the createPs step is enabled */
    @Override
    public Boolean getCreatePs() { return this.createPs; }

    /** @param createPs whether to run the createPs step */
    public void setCreatePs(Boolean createPs) { this.createPs = createPs; }

    /** @return arguments for the createPs step */
    @Override
    public CreatePsArgsImpl getCreatePsArgs() { return this.createPsArgs; }

    /** @param createPsArgs arguments for the createPs step */
    public void setCreatePsArgs(CreatePsArgsImpl createPsArgs) { this.createPsArgs = createPsArgs; }

    /** @return whether the trimByPs step is enabled */
    @Override
    public Boolean getTrimByPs() { return this.trimByPs; }

    /** @param trimByPs whether to run the trimByPs step */
    public void setTrimByPs(Boolean trimByPs) { this.trimByPs = trimByPs; }

    /** @return arguments for the trimByPs step */
    @Override
    public TrimByPsArgsImpl getTrimByPsArgs() { return this.trimByPsArgs; }

    /** @param trimByPsArgs arguments for the trimByPs step */
    public void setTrimByPsArgs(TrimByPsArgsImpl trimByPsArgs) { this.trimByPsArgs = trimByPsArgs; }

    /** @return whether the trimByPsToEquipoise step is enabled */
    @Override
    public Boolean getTrimByPsToEquipoise() { return this.trimByPsToEquipoise; }

    /** @param trimByPsToEquipoise whether to run the trimByPsToEquipoise step */
    public void setTrimByPsToEquipoise(Boolean trimByPsToEquipoise) {
        this.trimByPsToEquipoise = trimByPsToEquipoise;
    }

    /** @return arguments for the trimByPsToEquipoise step */
    @Override
    public TrimByPsToEquipoiseArgsImpl getTrimByPsToEquipoiseArgs() { return this.trimByPsToEquipoiseArgs; }

    /** @param trimByPsToEquipoiseArgs arguments for the trimByPsToEquipoise step */
    public void setTrimByPsToEquipoiseArgs(TrimByPsToEquipoiseArgsImpl trimByPsToEquipoiseArgs) {
        this.trimByPsToEquipoiseArgs = trimByPsToEquipoiseArgs;
    }

    /** @return whether the matchOnPs step is enabled */
    @Override
    public Boolean getMatchOnPs() { return this.matchOnPs; }

    /** @param matchOnPs whether to run the matchOnPs step */
    public void setMatchOnPs(Boolean matchOnPs) { this.matchOnPs = matchOnPs; }

    /** @return arguments for the matchOnPs step */
    @Override
    public MatchOnPsArgsImpl getMatchOnPsArgs() { return this.matchOnPsArgs; }

    /** @param matchOnPsArgs arguments for the matchOnPs step */
    public void setMatchOnPsArgs(MatchOnPsArgsImpl matchOnPsArgs) { this.matchOnPsArgs = matchOnPsArgs; }

    /** @return whether the matchOnPsAndCovariates step is enabled */
    @Override
    public Boolean getMatchOnPsAndCovariates() { return this.matchOnPsAndCovariates; }

    /** @param matchOnPsAndCovariates whether to run the matchOnPsAndCovariates step */
    public void setMatchOnPsAndCovariates(Boolean matchOnPsAndCovariates) {
        this.matchOnPsAndCovariates = matchOnPsAndCovariates;
    }

    /**
     * Fluent variant of {@link #setMatchOnPsAndCovariatesArgs(MatchOnPsAndCovariatesArgsImpl)}.
     *
     * @param matchOnPsAndCovariatesArgs arguments for the matchOnPsAndCovariates step
     * @return this analysis, for chaining
     */
    public CohortMethodAnalysis matchOnPsAndCovariatesArgs(MatchOnPsAndCovariatesArgsImpl matchOnPsAndCovariatesArgs) {
        this.matchOnPsAndCovariatesArgs = matchOnPsAndCovariatesArgs;
        return this;
    }

    /** @return arguments for the matchOnPsAndCovariates step */
    @Override
    public MatchOnPsAndCovariatesArgsImpl getMatchOnPsAndCovariatesArgs() { return this.matchOnPsAndCovariatesArgs; }

    /** @param matchOnPsAndCovariatesArgs arguments for the matchOnPsAndCovariates step */
    public void setMatchOnPsAndCovariatesArgs(MatchOnPsAndCovariatesArgsImpl matchOnPsAndCovariatesArgs) {
        this.matchOnPsAndCovariatesArgs = matchOnPsAndCovariatesArgs;
    }

    /** @return whether the stratifyByPs step is enabled */
    @Override
    public Boolean getStratifyByPs() { return this.stratifyByPs; }

    /** @param stratifyByPs whether to run the stratifyByPs step */
    public void setStratifyByPs(Boolean stratifyByPs) { this.stratifyByPs = stratifyByPs; }

    /** @return arguments for the stratifyByPs step */
    @Override
    public StratifyByPsArgsImpl getStratifyByPsArgs() { return this.stratifyByPsArgs; }

    /** @param stratifyByPsArgs arguments for the stratifyByPs step */
    public void setStratifyByPsArgs(StratifyByPsArgsImpl stratifyByPsArgs) {
        this.stratifyByPsArgs = stratifyByPsArgs;
    }

    /** @return whether the stratifyByPsAndCovariates step is enabled */
    @Override
    public Boolean getStratifyByPsAndCovariates() { return this.stratifyByPsAndCovariates; }

    /** @param stratifyByPsAndCovariates whether to run the stratifyByPsAndCovariates step */
    public void setStratifyByPsAndCovariates(Boolean stratifyByPsAndCovariates) {
        this.stratifyByPsAndCovariates = stratifyByPsAndCovariates;
    }

    /** @return arguments for the stratifyByPsAndCovariates step */
    @Override
    public StratifyByPsAndCovariatesArgsImpl getStratifyByPsAndCovariatesArgs() {
        return this.stratifyByPsAndCovariatesArgs;
    }

    /** @param stratifyByPsAndCovariatesArgs arguments for the stratifyByPsAndCovariates step */
    public void setStratifyByPsAndCovariatesArgs(StratifyByPsAndCovariatesArgsImpl stratifyByPsAndCovariatesArgs) {
        this.stratifyByPsAndCovariatesArgs = stratifyByPsAndCovariatesArgs;
    }

    /** @return whether the fitOutcomeModel step is enabled */
    @Override
    public Boolean getFitOutcomeModel() { return this.fitOutcomeModel; }

    /** @param fitOutcomeModel whether to run the fitOutcomeModel step */
    public void setFitOutcomeModel(Boolean fitOutcomeModel) { this.fitOutcomeModel = fitOutcomeModel; }

    /** @return arguments for the fitOutcomeModel step */
    @Override
    public FitOutcomeModelArgsImpl getFitOutcomeModelArgs() { return this.fitOutcomeModelArgs; }

    /** @param fitOutcomeModelArgs arguments for the fitOutcomeModel step */
    public void setFitOutcomeModelArgs(FitOutcomeModelArgsImpl fitOutcomeModelArgs) {
        this.fitOutcomeModelArgs = fitOutcomeModelArgs;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/GetDbCohortMethodDataArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/GetDbCohortMethodDataArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.GetDbCohortMethodDataArgs;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.RemoveDuplicateSubjectsEnum;
import org.ohdsi.webapi.RLangClassImpl;
import org.ohdsi.webapi.featureextraction.specification.CovariateSettingsImpl;
import java.util.Date;
/**
* Create a parameter object for the function getDbCohortMethodData
*/
public class GetDbCohortMethodDataArgsImpl extends RLangClassImpl implements GetDbCohortMethodDataArgs {

    /** Minimum calendar date for a cohort index date ('yyyymmdd'); null means unbounded. */
    private Date studyStartDate = null;
    /** Maximum calendar date for a cohort index date ('yyyymmdd'); also truncates risk windows. */
    private Date studyEndDate = null;
    private Boolean excludeDrugsFromCovariates = true;
    private Boolean firstExposureOnly = false;
    private RemoveDuplicateSubjectsEnum removeDuplicateSubjects = RemoveDuplicateSubjectsEnum.KEEP_ALL;
    private Boolean restrictToCommonPeriod = false;
    private Integer washoutPeriod = 0;
    /** 0 means no maximum cohort size (no sampling). */
    private Integer maxCohortSize = 0;
    private CovariateSettingsImpl covariateSettings = null;

    /** Defensive copy so the internal mutable {@link Date} is never shared with callers. */
    private static Date cloneDate(Date date) {
        return date == null ? null : new Date(date.getTime());
    }

    /** @return a copy of the study start date, or null when unset */
    @Override
    public Date getStudyStartDate() { return cloneDate(this.studyStartDate); }

    /** @param studyStartDate study start date; a defensive copy is stored */
    public void setStudyStartDate(Date studyStartDate) { this.studyStartDate = cloneDate(studyStartDate); }

    /** @return a copy of the study end date, or null when unset */
    @Override
    public Date getStudyEndDate() { return cloneDate(this.studyEndDate); }

    /** @param studyEndDate study end date; a defensive copy is stored */
    public void setStudyEndDate(Date studyEndDate) { this.studyEndDate = cloneDate(studyEndDate); }

    /**
     * Should the target and comparator drugs (and descendants) be excluded from the covariates?
     * Only meaningful when the drugs are drug concept IDs rather than cohort IDs.
     */
    @Override
    public Boolean getExcludeDrugsFromCovariates() { return this.excludeDrugsFromCovariates; }

    /** @param excludeDrugsFromCovariates whether to exclude the study drugs from covariates */
    public void setExcludeDrugsFromCovariates(Boolean excludeDrugsFromCovariates) {
        this.excludeDrugsFromCovariates = excludeDrugsFromCovariates;
    }

    /** @return whether only the first exposure per subject is included */
    @Override
    public Boolean getFirstExposureOnly() { return this.firstExposureOnly; }

    /** @param firstExposureOnly whether to keep only the first exposure per subject */
    public void setFirstExposureOnly(Boolean firstExposureOnly) { this.firstExposureOnly = firstExposureOnly; }

    /** @return how subjects present in both target and comparator cohorts are handled */
    @Override
    public RemoveDuplicateSubjectsEnum getRemoveDuplicateSubjects() { return this.removeDuplicateSubjects; }

    /** @param removeDuplicateSubjects duplicate-subject handling strategy */
    public void setRemoveDuplicateSubjects(RemoveDuplicateSubjectsEnum removeDuplicateSubjects) {
        this.removeDuplicateSubjects = removeDuplicateSubjects;
    }

    /** @return whether the analysis is restricted to the period when both exposures are observed */
    @Override
    public Boolean getRestrictToCommonPeriod() { return this.restrictToCommonPeriod; }

    /** @param restrictToCommonPeriod whether to restrict to the common observation period */
    public void setRestrictToCommonPeriod(Boolean restrictToCommonPeriod) {
        this.restrictToCommonPeriod = restrictToCommonPeriod;
    }

    /** @return minimum continuous observation time (days) required before the index date */
    @Override
    public Integer getWashoutPeriod() { return this.washoutPeriod; }

    /** @param washoutPeriod minimum prior observation time in days */
    public void setWashoutPeriod(Integer washoutPeriod) { this.washoutPeriod = washoutPeriod; }

    /** @return the sampling ceiling for either cohort; 0 means no maximum */
    @Override
    public Integer getMaxCohortSize() { return this.maxCohortSize; }

    /** @param maxCohortSize sampling ceiling for either cohort; 0 disables sampling */
    public void setMaxCohortSize(Integer maxCohortSize) { this.maxCohortSize = maxCohortSize; }

    /** @return the covariate construction settings */
    @Override
    public CovariateSettingsImpl getCovariateSettings() { return this.covariateSettings; }

    /** @param covariateSettings the covariate construction settings */
    public void setCovariateSettings(CovariateSettingsImpl covariateSettings) {
        this.covariateSettings = covariateSettings;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/TargetComparatorOutcomesImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/TargetComparatorOutcomesImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.*;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.TargetComparatorOutcomes;
import org.ohdsi.webapi.estimation.specification.TargetOutcomeImpl;
/**
*
* @author asena5
*/
public class TargetComparatorOutcomesImpl extends TargetOutcomeImpl implements TargetComparatorOutcomes {

    /** Comparator cohort id (required). */
    private Long comparatorId = null;
    /** Concept IDs excluded from covariate construction; lazily created, null until first add. */
    private List<Long> excludedCovariateConceptIds = null;
    /** Concept IDs required for covariate construction; lazily created, null until first add. */
    private List<Long> includedCovariateConceptIds = null;

    /** Appends to a lazily-created list, allocating it on first use. */
    private static List<Long> appendLazily(List<Long> list, Long item) {
        List<Long> target = (list == null) ? new ArrayList<>() : list;
        target.add(item);
        return target;
    }

    /** @return the comparator cohort id */
    @NotNull
    @Override
    public Long getComparatorId() { return this.comparatorId; }

    /** @param comparatorId the comparator cohort id */
    public void setComparatorId(Long comparatorId) { this.comparatorId = comparatorId; }

    /**
     * Adds a concept ID that must not be used to construct covariates.
     *
     * @param excludedCovariateConceptIdsItem concept ID to exclude
     * @return this object, for chaining
     */
    public TargetComparatorOutcomesImpl addExcludedCovariateConceptIdsItem(Long excludedCovariateConceptIdsItem) {
        this.excludedCovariateConceptIds = appendLazily(this.excludedCovariateConceptIds, excludedCovariateConceptIdsItem);
        return this;
    }

    /**
     * Concept IDs that cannot be used to construct covariates; specific to the
     * drug-comparator combination.
     *
     * @return the exclusion list, or null when none have been added
     */
    @Override
    public List<Long> getExcludedCovariateConceptIds() { return this.excludedCovariateConceptIds; }

    /** @param excludedCovariateConceptIds replacement exclusion list */
    public void setExcludedCovariateConceptIds(List<Long> excludedCovariateConceptIds) {
        this.excludedCovariateConceptIds = excludedCovariateConceptIds;
    }

    /**
     * Adds a concept ID that must be used to construct covariates.
     *
     * @param includedCovariateConceptIdsItem concept ID to include
     * @return this object, for chaining
     */
    public TargetComparatorOutcomesImpl addIncludedCovariateConceptIdsItem(Long includedCovariateConceptIdsItem) {
        this.includedCovariateConceptIds = appendLazily(this.includedCovariateConceptIds, includedCovariateConceptIdsItem);
        return this;
    }

    /**
     * Concept IDs that must be used to construct covariates; specific to the
     * drug-comparator combination.
     *
     * @return the inclusion list, or null when none have been added
     */
    @Override
    public List<Long> getIncludedCovariateConceptIds() { return this.includedCovariateConceptIds; }

    /** @param includedCovariateConceptIds replacement inclusion list */
    public void setIncludedCovariateConceptIds(List<Long> includedCovariateConceptIds) {
        this.includedCovariateConceptIds = includedCovariateConceptIds;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/TrimByPsToEquipoiseArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/TrimByPsToEquipoiseArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.TrimByPsToEquipoiseArgs;
import org.ohdsi.webapi.RLangClassImpl;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author asena5
*/
public class TrimByPsToEquipoiseArgsImpl extends RLangClassImpl implements TrimByPsToEquipoiseArgs {

    /** Lower and upper preference-score bounds for keeping persons; lazily created. */
    private List<Float> bounds = null;

    /**
     * Adds one bound value, allocating the list on first use.
     *
     * @param boundsItem bound to append
     * @return this object, for chaining
     */
    public TrimByPsToEquipoiseArgsImpl addBoundsItem(Float boundsItem) {
        if (this.bounds == null) {
            this.bounds = new ArrayList<>();
        }
        this.bounds.add(boundsItem);
        return this;
    }

    /** @return the preference-score bounds, or null when none have been added */
    @Override
    public List<Float> getBounds() { return this.bounds; }

    /** @param bounds replacement list of preference-score bounds */
    public void setBounds(List<Float> bounds) { this.bounds = bounds; }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/StratifyByPsArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/StratifyByPsArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.webapi.RLangClassImpl;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.BaseSelectionEnum;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.StratifyByPsArgs;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.*;
/**
*
* @author asena5
*/
public class StratifyByPsArgsImpl extends RLangClassImpl implements StratifyByPsArgs {

    /** Number of strata; boundaries are chosen to hold equal numbers of target persons. */
    private Integer numberOfStrata = 5;
    /** Base selection of subjects used to determine strata bounds (all/target/comparator). */
    private BaseSelectionEnum baseSelection = BaseSelectionEnum.ALL;
    /** Extra data columns to stratify on in addition to the propensity score; lazily created. */
    private List<String> stratificationColumns = null;

    /** @return the number of strata */
    @Override
    @NotNull
    public Integer getNumberOfStrata() { return this.numberOfStrata; }

    /** @param numberOfStrata the number of strata */
    public void setNumberOfStrata(Integer numberOfStrata) { this.numberOfStrata = numberOfStrata; }

    /** @return the base selection used to determine strata bounds */
    @Override
    public BaseSelectionEnum getBaseSelection() { return this.baseSelection; }

    /** @param baseSelection the base selection used to determine strata bounds */
    public void setBaseSelection(BaseSelectionEnum baseSelection) { this.baseSelection = baseSelection; }

    /**
     * Adds one extra stratification column, allocating the list on first use.
     *
     * @param stratificationColumnsItem column name to append
     * @return this object, for chaining
     */
    public StratifyByPsArgsImpl addStratificationColumnsItem(String stratificationColumnsItem) {
        if (this.stratificationColumns == null) {
            this.stratificationColumns = new ArrayList<>();
        }
        this.stratificationColumns.add(stratificationColumnsItem);
        return this;
    }

    /** @return the extra stratification columns, or null when none have been added */
    @Override
    public List<String> getStratificationColumns() { return this.stratificationColumns; }

    /** @param stratificationColumns replacement list of stratification columns */
    public void setStratificationColumns(List<String> stratificationColumns) {
        this.stratificationColumns = stratificationColumns;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/CreatePsArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/comparativecohortanalysis/specification/CreatePsArgsImpl.java | package org.ohdsi.webapi.estimation.comparativecohortanalysis.specification;
import org.ohdsi.webapi.RLangClassImpl;
import java.util.List;
import org.ohdsi.webapi.cyclops.specification.ControlImpl;
import org.ohdsi.webapi.cyclops.specification.PriorImpl;
import org.ohdsi.analysis.estimation.comparativecohortanalysis.design.CreatePsArgs;
/**
*
* @author asena5
*/
public class CreatePsArgsImpl extends RLangClassImpl implements CreatePsArgs {

    /** Covariates excluded from the propensity model. */
    private List<Integer> excludeCovariateIds = null;
    /** When non-null, only these covariates enter the propensity model. */
    private List<Integer> includeCovariateIds = null;
    /** Down-sampling ceiling before model fitting; 0 disables down-sampling. */
    private Integer maxCohortSizeForFitting = 250000;
    private Boolean errorOnHighCorrelation = true;
    private Boolean stopOnError = true;
    private PriorImpl prior = null;
    private ControlImpl control = null;

    /** @return covariate IDs excluded from the propensity model */
    @Override
    public List<Integer> getExcludeCovariateIds() { return this.excludeCovariateIds; }

    /** @param excludeCovariateIds covariate IDs to exclude from the propensity model */
    public void setExcludeCovariateIds(List<Integer> excludeCovariateIds) {
        this.excludeCovariateIds = excludeCovariateIds;
    }

    /** @return covariate IDs the propensity model is restricted to, or null for all */
    @Override
    public List<Integer> getIncludeCovariateIds() { return this.includeCovariateIds; }

    /** @param includeCovariateIds covariate IDs to restrict the propensity model to */
    public void setIncludeCovariateIds(List<Integer> includeCovariateIds) {
        this.includeCovariateIds = includeCovariateIds;
    }

    /**
     * Cohorts larger than this are down-sampled before fitting (scores are still computed
     * for all subjects); 0 disables down-sampling.
     *
     * @return the fitting sample ceiling
     */
    @Override
    public Integer getMaxCohortSizeForFitting() { return this.maxCohortSizeForFitting; }

    /** @param maxCohortSizeForFitting fitting sample ceiling; 0 disables down-sampling */
    public void setMaxCohortSizeForFitting(Integer maxCohortSizeForFitting) {
        this.maxCohortSizeForFitting = maxCohortSizeForFitting;
    }

    /**
     * When true, each covariate is tested for unusually high (positive or negative)
     * correlation with target assignment, and such a covariate raises an error.
     *
     * @return the high-correlation error flag
     */
    @Override
    public Boolean getErrorOnHighCorrelation() { return this.errorOnHighCorrelation; }

    /** @param errorOnHighCorrelation whether high correlation should raise an error */
    public void setErrorOnHighCorrelation(Boolean errorOnHighCorrelation) {
        this.errorOnHighCorrelation = errorOnHighCorrelation;
    }

    /**
     * Should an error stop execution? Otherwise the two cohorts are assumed to be
     * perfectly separable.
     *
     * @return the stop-on-error flag
     */
    @Override
    public Boolean getStopOnError() { return this.stopOnError; }

    /** @param stopOnError whether an error stops execution */
    public void setStopOnError(Boolean stopOnError) { this.stopOnError = stopOnError; }

    /** @return the regularization prior */
    @Override
    public PriorImpl getPrior() { return this.prior; }

    /** @param prior the regularization prior */
    public void setPrior(PriorImpl prior) { this.prior = prior; }

    /** @return the fitting control settings */
    @Override
    public ControlImpl getControl() { return this.control; }

    /** @param control the fitting control settings */
    public void setControl(ControlImpl control) { this.control = control; }

    /**
     * Fluent setter for the R attribute class inherited from {@code RLangClassImpl}.
     *
     * @param attrClass the R attribute class name
     * @return this object, for chaining
     */
    public CreatePsArgsImpl attrClass(String attrClass) {
        this.attrClass = attrClass;
        return this;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/specification/TargetOutcomeImpl.java | src/main/java/org/ohdsi/webapi/estimation/specification/TargetOutcomeImpl.java | package org.ohdsi.webapi.estimation.specification;
import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.*;
import org.ohdsi.analysis.estimation.design.TargetOutcome;
/**
*
* @author asena5
*/
public class TargetOutcomeImpl implements TargetOutcome {

    /** Target cohort id (required). */
    private Long targetId = null;
    /** Outcome cohort ids; eagerly created so adds never need a null check. */
    private List<Long> outcomeIds = new ArrayList<>();

    /** @return the target cohort id */
    @NotNull
    @Override
    public Long getTargetId() { return this.targetId; }

    /** @param targetId the target cohort id */
    public void setTargetId(Long targetId) { this.targetId = targetId; }

    /**
     * Adds one outcome cohort id.
     *
     * @param outcomeIdsItem outcome cohort id to append
     * @return this object, for chaining
     */
    public TargetOutcomeImpl addOutcomeIdsItem(Long outcomeIdsItem) {
        this.outcomeIds.add(outcomeIdsItem);
        return this;
    }

    /** @return the list of outcome cohort ids */
    @NotNull
    @Override
    public List<Long> getOutcomeIds() { return this.outcomeIds; }

    /** @param outcomeIds replacement list of outcome cohort ids */
    public void setOutcomeIds(List<Long> outcomeIds) { this.outcomeIds = outcomeIds; }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/specification/AnalysisImpl.java | src/main/java/org/ohdsi/webapi/estimation/specification/AnalysisImpl.java | package org.ohdsi.webapi.estimation.specification;
import org.ohdsi.webapi.RLangClassImpl;
import org.ohdsi.analysis.estimation.design.Analysis;
/**
*
* @author asena5
*/
public class AnalysisImpl extends RLangClassImpl implements Analysis {

    /** Unique identifier for the analysis. */
    private Integer analysisId = null;
    /** Free-text description of the analysis. */
    private String description = null;

    /** @return the unique identifier for the analysis */
    @Override
    public Integer getAnalysisId() { return this.analysisId; }

    /** @param analysisId the unique identifier for the analysis */
    public void setAnalysisId(Integer analysisId) { this.analysisId = analysisId; }

    /** @return the description of the analysis */
    @Override
    public String getDescription() { return this.description; }

    /** @param description the description of the analysis */
    public void setDescription(String description) { this.description = description; }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/specification/NegativeControlImpl.java | src/main/java/org/ohdsi/webapi/estimation/specification/NegativeControlImpl.java | package org.ohdsi.webapi.estimation.specification;
import org.ohdsi.analysis.estimation.design.NegativeControl;
import org.ohdsi.analysis.estimation.design.NegativeControlTypeEnum;
/**
*
* @author asena5
*/
public class NegativeControlImpl implements NegativeControl {

    /** Identifier of the target cohort. */
    private Long targetId = null;
    /** Identifier of the comparator cohort. */
    private Long comparatorId = null;
    /** Identifier of the negative control cohort. */
    private Long outcomeId = null;
    /** Display name of the negative control cohort. */
    private String outcomeName = null;
    /** Kind of negative control; defaults to OUTCOME. */
    private NegativeControlTypeEnum type = NegativeControlTypeEnum.OUTCOME;

    /** @return the target cohort identifier */
    @Override
    public Long getTargetId() { return this.targetId; }

    /** @param targetId the target cohort identifier */
    public void setTargetId(Long targetId) { this.targetId = targetId; }

    /** @return the comparator cohort identifier */
    @Override
    public Long getComparatorId() { return this.comparatorId; }

    /** @param comparatorId the comparator cohort identifier */
    public void setComparatorId(Long comparatorId) { this.comparatorId = comparatorId; }

    /** @return the negative control cohort identifier */
    @Override
    public Long getOutcomeId() { return this.outcomeId; }

    /** @param outcomeId the negative control cohort identifier */
    public void setOutcomeId(Long outcomeId) { this.outcomeId = outcomeId; }

    /** @return the negative control cohort name */
    @Override
    public String getOutcomeName() { return this.outcomeName; }

    /** @param outcomeName the negative control cohort name */
    public void setOutcomeName(String outcomeName) { this.outcomeName = outcomeName; }

    /** @return the type of negative control */
    @Override
    public NegativeControlTypeEnum getType() { return this.type; }

    /** @param type the type of negative control */
    public void setType(NegativeControlTypeEnum type) { this.type = type; }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/specification/EstimationAnalysisImpl.java | src/main/java/org/ohdsi/webapi/estimation/specification/EstimationAnalysisImpl.java | package org.ohdsi.webapi.estimation.specification;
import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import org.ohdsi.analysis.hydra.design.SkeletonTypeEnum;
import org.ohdsi.analysis.estimation.design.EstimationAnalysis;
import org.ohdsi.webapi.CommonDTO;
import org.ohdsi.webapi.analysis.AnalysisCohortDefinition;
import org.ohdsi.webapi.analysis.AnalysisConceptSet;
import org.ohdsi.webapi.conceptset.ConceptSetCrossReferenceImpl;
/**
*
* @author asena5
*/
@JsonIgnoreProperties(ignoreUnknown=true, value = {"createdBy", "createdDate", "modifiedBy", "modifiedDate"})
@JsonInclude(JsonInclude.Include.NON_NULL)
public class EstimationAnalysisImpl implements EstimationAnalysis, CommonDTO {
private Integer id = null;
private String name = null;
private String description = null;
private String version = null;
private String organizationName = null;
private String packageName = null;
private SkeletonTypeEnum skeletonType = SkeletonTypeEnum.COMPARATIVE_EFFECT_STUDY;
private String skeletonVersion = "v0.0.1";
private String createdBy = null;
private String createdDate = null;
private String modifiedBy = null;
private String modifiedDate = null;
private List<AnalysisCohortDefinition> cohortDefinitions = null;
private List<AnalysisConceptSet> conceptSets = null;
private List<ConceptSetCrossReferenceImpl> conceptSetCrossReference = null;
private List<NegativeControlImpl> negativeControls = null;
private Boolean doPositiveControlSynthesis = false;
private PositiveControlSynthesisArgsImpl positiveControlSynthesisArgs = null;
private NegativeControlOutcomeCohortExpressionImpl negativeControlOutcomeCohortDefinition = null;
private NegativeControlExposureCohortExpressionImpl negativeControlExposureCohortDefinition = null;
private EstimationAnalysisSettingsImpl estimationAnalysisSettings = null;
/**
* Identifier for the estimation specification
* @return id
**/
@Override
public Integer getId() {
return id;
}
/**
*
* @param id
*/
public void setId(Integer id) {
this.id = id;
}
/**
* Name for the estimation specification
* @return name
**/
@Override
public String getName() {
return name;
}
/**
*
* @param name
*/
public void setName(String name) {
this.name = name;
}
/**
* The description of the study
* @return description
**/
@Override
public String getDescription() {
return description;
}
/**
*
* @param description
*/
public void setDescription(String description) {
this.description = description;
}
/**
 * Version number of the specification.
 * @return the specification version string
 **/
@Override
public String getVersion() {
    return version;
}
/**
 * Sets the version number of the specification.
 * @param version the specification version string
 */
public void setVersion(String version) {
    this.version = version;
}
/**
 * The organization that produced the specification.
 * @return the producing organization's name
 **/
@Override
public String getOrganizationName() {
    return organizationName;
}
/**
 * Sets the organization that produced the specification.
 * @param organizationName the producing organization's name
 */
public void setOrganizationName(String organizationName) {
    this.organizationName = organizationName;
}
/**
 * The name of the R package used for execution.
 * @return the R package name
 **/
@Override
public String getPackageName() {
    return packageName;
}
/**
 * Sets the name of the R package used for execution.
 * @param packageName the R package name
 */
public void setPackageName(String packageName) {
    this.packageName = packageName;
}
/**
 * The base skeleton R package this analysis is generated from.
 * @return the skeleton type
 **/
@Override
public SkeletonTypeEnum getSkeletonType() {
    return skeletonType;
}
/**
 * Sets the base skeleton R package type.
 * @param skeletonType the skeleton type
 */
public void setSkeletonType(SkeletonTypeEnum skeletonType) {
    this.skeletonType = skeletonType;
}
/**
 * The corresponding skeleton version to use.
 * @return the skeleton version string
 **/
@Override
public String getSkeletonVersion() {
    return skeletonVersion;
}
/**
 * Sets the skeleton version to use.
 * @param skeletonVersion the skeleton version string
 */
public void setSkeletonVersion(String skeletonVersion) {
    this.skeletonVersion = skeletonVersion;
}
/**
 * The person who created the specification.
 * @return identifier/name of the creator
 **/
@Override
public String getCreatedBy() {
    return createdBy;
}
/**
 * Sets the person who created the specification.
 * @param createdBy identifier/name of the creator
 */
public void setCreatedBy(String createdBy) {
    this.createdBy = createdBy;
}
/**
 * The date and time the estimation was first saved.
 * @return creation timestamp as a string
 **/
@Override
public String getCreatedDate() {
    return createdDate;
}
/**
 * Sets the date and time the estimation was first saved.
 * @param createdDate creation timestamp as a string
 */
public void setCreatedDate(String createdDate) {
    this.createdDate = createdDate;
}
/**
 * The person who last updated the specification.
 * @return identifier/name of the last modifier
 **/
@Override
public String getModifiedBy() {
    return modifiedBy;
}
/**
 * Sets the person who last updated the specification.
 * @param modifiedBy identifier/name of the last modifier
 */
public void setModifiedBy(String modifiedBy) {
    this.modifiedBy = modifiedBy;
}
/**
 * The date and time the estimation was last saved.
 * @return last-modified timestamp as a string
 **/
@Override
public String getModifiedDate() {
    return modifiedDate;
}
/**
 * Sets the date and time the estimation was last saved.
 * @param modifiedDate last-modified timestamp as a string
 */
public void setModifiedDate(String modifiedDate) {
    this.modifiedDate = modifiedDate;
}
/**
 * Appends one cohort definition to this analysis, lazily creating the
 * backing list on first use.
 *
 * @param cohortDefinitionsItem the cohort definition to append
 * @return this instance, to allow chained calls
 */
public EstimationAnalysisImpl addCohortDefinitionsItem(AnalysisCohortDefinition cohortDefinitionsItem) {
    List<AnalysisCohortDefinition> items = this.cohortDefinitions;
    if (items == null) {
        items = new ArrayList<>();
        this.cohortDefinitions = items;
    }
    items.add(cohortDefinitionsItem);
    return this;
}
/**
 * The cohort definitions referenced by this estimation analysis.
 * @return the cohort definitions, or null if none have been set
 **/
@Override
public List<AnalysisCohortDefinition> getCohortDefinitions() {
    return cohortDefinitions;
}
/**
 * Replaces the full list of cohort definitions.
 * @param cohortDefinitions the cohort definitions to use
 */
public void setCohortDefinitions(List<AnalysisCohortDefinition> cohortDefinitions) {
    this.cohortDefinitions = cohortDefinitions;
}
/**
 * Appends one concept set to this analysis, lazily creating the backing
 * list on first use.
 *
 * @param conceptSetsItem the concept set to append
 * @return this instance, to allow chained calls
 */
public EstimationAnalysisImpl addConceptSetsItem(AnalysisConceptSet conceptSetsItem) {
    List<AnalysisConceptSet> sets = this.conceptSets;
    if (sets == null) {
        sets = new ArrayList<>();
        this.conceptSets = sets;
    }
    sets.add(conceptSetsItem);
    return this;
}
/**
 * The concept sets referenced by this estimation analysis.
 * @return the concept sets, or null if none have been set
 **/
@Override
public List<AnalysisConceptSet> getConceptSets() {
    return conceptSets;
}
/**
 * Replaces the full list of concept sets.
 * @param conceptSets the concept sets to use
 */
public void setConceptSets(List<AnalysisConceptSet> conceptSets) {
    this.conceptSets = conceptSets;
}
/**
 * Appends one concept-set cross reference entry, lazily creating the
 * backing list on first use.
 *
 * @param conceptSetCrossReferenceItem the cross-reference entry to append
 * @return this instance, to allow chained calls
 */
public EstimationAnalysisImpl addConceptSetCrossReferenceItem(ConceptSetCrossReferenceImpl conceptSetCrossReferenceItem) {
    List<ConceptSetCrossReferenceImpl> refs = this.conceptSetCrossReference;
    if (refs == null) {
        refs = new ArrayList<>();
        this.conceptSetCrossReference = refs;
    }
    refs.add(conceptSetCrossReferenceItem);
    return this;
}
/**
 * Cross references between concept sets and the analysis components that
 * use them.
 * @return the cross-reference entries, or null if none have been set
 **/
@Override
public List<ConceptSetCrossReferenceImpl> getConceptSetCrossReference() {
    return conceptSetCrossReference;
}
/**
 * Replaces the full list of concept-set cross references.
 * @param conceptSetCrossReference the cross-reference entries to use
 */
public void setConceptSetCrossReference(List<ConceptSetCrossReferenceImpl> conceptSetCrossReference) {
    this.conceptSetCrossReference = conceptSetCrossReference;
}
/**
 * Appends one negative control to this analysis, lazily creating the
 * backing list on first use.
 *
 * @param negativeControlsItem the negative control to append
 * @return this instance, to allow chained calls
 */
public EstimationAnalysisImpl addNegativeControlsItem(NegativeControlImpl negativeControlsItem) {
    List<NegativeControlImpl> controls = this.negativeControls;
    if (controls == null) {
        controls = new ArrayList<>();
        this.negativeControls = controls;
    }
    controls.add(negativeControlsItem);
    return this;
}
/**
 * The negative controls used by this estimation analysis.
 * @return the negative controls, or null if none have been set
 **/
@Override
public List<NegativeControlImpl> getNegativeControls() {
    return negativeControls;
}
/**
 * Replaces the full list of negative controls.
 * @param negativeControls the negative controls to use
 */
public void setNegativeControls(List<NegativeControlImpl> negativeControls) {
    this.negativeControls = negativeControls;
}
/**
 * Whether positive control synthesis should be performed.
 * @return true to synthesize positive controls
 */
@Override
public Boolean getDoPositiveControlSynthesis() {
    return doPositiveControlSynthesis;
}
/**
 * Enables or disables positive control synthesis.
 * @param doPositiveControlSynthesis true to synthesize positive controls
 */
public void setDoPositiveControlSynthesis(Boolean doPositiveControlSynthesis) {
    this.doPositiveControlSynthesis = doPositiveControlSynthesis;
}
/**
 * Arguments used when synthesizing positive controls.
 * @return the positive control synthesis arguments
 */
@Override
public PositiveControlSynthesisArgsImpl getPositiveControlSynthesisArgs() {
    return positiveControlSynthesisArgs;
}
/**
 * Sets the arguments used when synthesizing positive controls.
 * @param positiveControlSynthesisArgs the positive control synthesis arguments
 */
public void setPositiveControlSynthesisArgs(PositiveControlSynthesisArgsImpl positiveControlSynthesisArgs) {
    this.positiveControlSynthesisArgs = positiveControlSynthesisArgs;
}
/**
 * Cohort expression for negative control outcomes.
 * @return the negative control outcome cohort definition
 */
@Override
public NegativeControlOutcomeCohortExpressionImpl getNegativeControlOutcomeCohortDefinition() {
    return negativeControlOutcomeCohortDefinition;
}
/**
 * Sets the cohort expression for negative control outcomes.
 * @param negativeControlOutcomeCohortDefinition the outcome cohort definition
 */
public void setNegativeControlOutcomeCohortDefinition(NegativeControlOutcomeCohortExpressionImpl negativeControlOutcomeCohortDefinition) {
    this.negativeControlOutcomeCohortDefinition = negativeControlOutcomeCohortDefinition;
}
/**
 * Cohort expression for negative control exposures.
 * @return the negative control exposure cohort definition
 */
@Override
public NegativeControlExposureCohortExpressionImpl getNegativeControlExposureCohortDefinition() {
    return negativeControlExposureCohortDefinition;
}
/**
 * Sets the cohort expression for negative control exposures.
 * @param negativeControlExposureCohortDefinition the exposure cohort definition
 */
public void setNegativeControlExposureCohortDefinition(NegativeControlExposureCohortExpressionImpl negativeControlExposureCohortDefinition) {
    this.negativeControlExposureCohortDefinition = negativeControlExposureCohortDefinition;
}
/**
 * The settings describing the type-specific estimation analysis.
 * @return the estimation analysis settings
 */
@Override
public EstimationAnalysisSettingsImpl getEstimationAnalysisSettings() {
    return estimationAnalysisSettings;
}
/**
 * Sets the settings describing the type-specific estimation analysis.
 * @param estimationAnalysisSettings the estimation analysis settings
 */
public void setEstimationAnalysisSettings(EstimationAnalysisSettingsImpl estimationAnalysisSettings) {
    this.estimationAnalysisSettings = estimationAnalysisSettings;
}
} | java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/specification/NegativeControlOutcomeCohortExpressionImpl.java | src/main/java/org/ohdsi/webapi/estimation/specification/NegativeControlOutcomeCohortExpressionImpl.java | package org.ohdsi.webapi.estimation.specification;
import java.util.ArrayList;
import java.util.List;
import org.ohdsi.analysis.estimation.design.NegativeControlOutcomeCohortExpression;
/**
*
* @author asena5
*/
/**
 * Expression describing how negative control outcome cohorts are built:
 * which domains are searched, whether descendant concepts are considered,
 * and which occurrences of an event are kept.
 *
 * @author asena5
 */
public class NegativeControlOutcomeCohortExpressionImpl implements NegativeControlOutcomeCohortExpression {

    private String occurrenceType;
    private Boolean detectOnDescendants;
    private List<String> domains;

    /**
     * The type of occurrence of the event when selecting from the domain;
     * the options are "All" or "First".
     *
     * @return the occurrence type
     **/
    @Override
    public String getOccurrenceType() {
        return this.occurrenceType;
    }

    /**
     * Sets the occurrence type ("All" or "First").
     *
     * @param occurrenceType the occurrence type
     */
    public void setOccurrenceType(String occurrenceType) {
        this.occurrenceType = occurrenceType;
    }

    /**
     * When true, descendant concepts of the conceptId are used to detect the
     * exposure/outcome, and occurrences roll up to the conceptId.
     *
     * @return whether descendants are considered
     **/
    @Override
    public Boolean getDetectOnDescendants() {
        return this.detectOnDescendants;
    }

    /**
     * Sets whether descendant concepts are considered.
     *
     * @param detectOnDescendants whether descendants are considered
     */
    public void setDetectOnDescendants(Boolean detectOnDescendants) {
        this.detectOnDescendants = detectOnDescendants;
    }

    /**
     * Appends one domain name, lazily creating the backing list on first use.
     *
     * @param domainsItem the domain name to append
     * @return this instance, to allow chained calls
     */
    public NegativeControlOutcomeCohortExpressionImpl addDomainsItem(String domainsItem) {
        List<String> domainList = this.domains;
        if (domainList == null) {
            domainList = new ArrayList<>();
            this.domains = domainList;
        }
        domainList.add(domainsItem);
        return this;
    }

    /**
     * The domains used when evaluating negative control events.
     *
     * @return the domain names, or null if none have been set
     **/
    @Override
    public List<String> getDomains() {
        return this.domains;
    }

    /**
     * Replaces the full list of domains.
     *
     * @param domains the domain names to use
     */
    public void setDomains(List<String> domains) {
        this.domains = domains;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/specification/PositiveControlSynthesisArgsImpl.java | src/main/java/org/ohdsi/webapi/estimation/specification/PositiveControlSynthesisArgsImpl.java | package org.ohdsi.webapi.estimation.specification;
import java.util.ArrayList;
import java.util.List;
import org.ohdsi.analysis.estimation.design.PositiveControlSynthesisModelTypeEnum;
import org.ohdsi.analysis.estimation.design.PositiveControlSynthesisArgs;
import org.ohdsi.webapi.cyclops.specification.ControlImpl;
import org.ohdsi.webapi.featureextraction.specification.CovariateSettingsImpl;
import org.ohdsi.webapi.cyclops.specification.PriorImpl;
/**
*
* @author asena5
*/
/**
 * Arguments controlling positive control synthesis: how outcome models are
 * fit, the risk window used, and which effect sizes are injected.
 *
 * @author asena5
 */
public class PositiveControlSynthesisArgsImpl implements PositiveControlSynthesisArgs {

    private PositiveControlSynthesisModelTypeEnum modelType = PositiveControlSynthesisModelTypeEnum.SURVIVAL;
    private Integer minOutcomeCountForModel = 100;
    private Integer minOutcomeCountForInjection = 25;
    private CovariateSettingsImpl covariateSettings = null;
    private PriorImpl prior = null;
    private ControlImpl control = null;
    private Boolean firstExposureOnly = false;
    private Integer washoutPeriod = 183;
    private Integer riskWindowStart = 0;
    private Integer riskWindowEnd = 0;
    private Boolean addExposureDaysToEnd = true;
    private Boolean firstOutcomeOnly = false;
    private Boolean removePeopleWithPriorOutcomes = false;
    private Integer maxSubjectsForModel = 100000;
    private List<Float> effectSizes = null;
    private Float precision = 0.01f;
    private Integer outputIdOffset = 1000;

    /** Model type: either "poisson" or "survival". */
    @Override
    public PositiveControlSynthesisModelTypeEnum getModelType() {
        return this.modelType;
    }

    /** Sets the model type ("poisson" or "survival"). */
    public void setModelType(PositiveControlSynthesisModelTypeEnum modelType) {
        this.modelType = modelType;
    }

    /** Minimum number of outcome events required to build a model. */
    @Override
    public Integer getMinOutcomeCountForModel() {
        return this.minOutcomeCountForModel;
    }

    /** Sets the minimum number of outcome events required to build a model. */
    public void setMinOutcomeCountForModel(Integer minOutcomeCountForModel) {
        this.minOutcomeCountForModel = minOutcomeCountForModel;
    }

    /** Minimum number of outcome events required to inject a signal. */
    @Override
    public Integer getMinOutcomeCountForInjection() {
        return this.minOutcomeCountForInjection;
    }

    /** Sets the minimum number of outcome events required to inject a signal. */
    public void setMinOutcomeCountForInjection(Integer minOutcomeCountForInjection) {
        this.minOutcomeCountForInjection = minOutcomeCountForInjection;
    }

    /** Covariate settings used when fitting the outcome model. */
    @Override
    public CovariateSettingsImpl getCovariateSettings() {
        return this.covariateSettings;
    }

    /** Sets the covariate settings. */
    public void setCovariateSettings(CovariateSettingsImpl covariateSettings) {
        this.covariateSettings = covariateSettings;
    }

    /** Regularization prior for the outcome model. */
    @Override
    public PriorImpl getPrior() {
        return this.prior;
    }

    /** Sets the regularization prior. */
    public void setPrior(PriorImpl prior) {
        this.prior = prior;
    }

    /** Optimization control settings for the outcome model. */
    @Override
    public ControlImpl getControl() {
        return this.control;
    }

    /** Sets the optimization control settings. */
    public void setControl(ControlImpl control) {
        this.control = control;
    }

    /** Should signals be injected only for the first exposure (acute effect)? */
    @Override
    public Boolean getFirstExposureOnly() {
        return this.firstExposureOnly;
    }

    /** Sets whether signals are injected only for the first exposure. */
    public void setFirstExposureOnly(Boolean firstExposureOnly) {
        this.firstExposureOnly = firstExposureOnly;
    }

    /**
     * Number of days at the start of observation with no signal injection,
     * used to establish first exposure/outcome and extract covariates.
     */
    @Override
    public Integer getWashoutPeriod() {
        return this.washoutPeriod;
    }

    /** Sets the washout period in days. */
    public void setWashoutPeriod(Integer washoutPeriod) {
        this.washoutPeriod = washoutPeriod;
    }

    /**
     * Start of the risk window relative to exposure start (days); 0 means
     * risk starts on the first day of exposure.
     */
    @Override
    public Integer getRiskWindowStart() {
        return this.riskWindowStart;
    }

    /** Sets the risk window start (days relative to exposure start). */
    public void setRiskWindowStart(Integer riskWindowStart) {
        this.riskWindowStart = riskWindowStart;
    }

    /**
     * End of the risk window relative to exposure start; exposure length is
     * typically added when addExposureDaysToEnd is true.
     */
    @Override
    public Integer getRiskWindowEnd() {
        return this.riskWindowEnd;
    }

    /** Sets the risk window end (days relative to exposure start). */
    public void setRiskWindowEnd(Integer riskWindowEnd) {
        this.riskWindowEnd = riskWindowEnd;
    }

    /** Should the length of exposure be added to the risk window? */
    @Override
    public Boolean getAddExposureDaysToEnd() {
        return this.addExposureDaysToEnd;
    }

    /** Sets whether exposure length is added to the risk window. */
    public void setAddExposureDaysToEnd(Boolean addExposureDaysToEnd) {
        this.addExposureDaysToEnd = addExposureDaysToEnd;
    }

    /** Should only the first outcome per person be used when modeling? */
    @Override
    public Boolean getFirstOutcomeOnly() {
        return this.firstOutcomeOnly;
    }

    /** Sets whether only the first outcome per person is modeled. */
    public void setFirstOutcomeOnly(Boolean firstOutcomeOnly) {
        this.firstOutcomeOnly = firstOutcomeOnly;
    }

    /** Should people with prior outcomes be removed? */
    @Override
    public Boolean getRemovePeopleWithPriorOutcomes() {
        return this.removePeopleWithPriorOutcomes;
    }

    /** Sets whether people with prior outcomes are removed. */
    public void setRemovePeopleWithPriorOutcomes(Boolean removePeopleWithPriorOutcomes) {
        this.removePeopleWithPriorOutcomes = removePeopleWithPriorOutcomes;
    }

    /** Maximum number of people used to fit an outcome model. */
    @Override
    public Integer getMaxSubjectsForModel() {
        return this.maxSubjectsForModel;
    }

    /** Sets the maximum number of people used to fit an outcome model. */
    public void setMaxSubjectsForModel(Integer maxSubjectsForModel) {
        this.maxSubjectsForModel = maxSubjectsForModel;
    }

    /**
     * Appends one effect size to inject, lazily creating the backing list on
     * first use.
     *
     * @param effectSizesItem the effect size to append
     * @return this instance, to allow chained calls
     */
    public PositiveControlSynthesisArgsImpl addEffectSizesItem(Float effectSizesItem) {
        List<Float> sizes = this.effectSizes;
        if (sizes == null) {
            sizes = new ArrayList<>();
            this.effectSizes = sizes;
        }
        sizes.add(effectSizesItem);
        return this;
    }

    /** The effect sizes that should be inserted. */
    @Override
    public List<Float> getEffectSizes() {
        return this.effectSizes;
    }

    /** Replaces the full list of effect sizes. */
    public void setEffectSizes(List<Float> effectSizes) {
        this.effectSizes = effectSizes;
    }

    /** Allowed ratio between target and injected signal size. */
    @Override
    public Float getPrecision() {
        return this.precision;
    }

    /** Sets the allowed ratio between target and injected signal size. */
    public void setPrecision(Float precision) {
        this.precision = precision;
    }

    /** First new outcome ID to be created for synthesized controls. */
    @Override
    public Integer getOutputIdOffset() {
        return this.outputIdOffset;
    }

    /** Sets the first new outcome ID to be created. */
    public void setOutputIdOffset(Integer outputIdOffset) {
        this.outputIdOffset = outputIdOffset;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/specification/EstimationAnalysisSettingsImpl.java | src/main/java/org/ohdsi/webapi/estimation/specification/EstimationAnalysisSettingsImpl.java | package org.ohdsi.webapi.estimation.specification;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import org.ohdsi.analysis.estimation.design.EstimationAnalysisConst;
import org.ohdsi.analysis.estimation.design.EstimationTypeEnum;
import org.ohdsi.analysis.estimation.design.EstimationAnalysisSettings;
import org.ohdsi.analysis.estimation.design.Settings;
import org.ohdsi.webapi.estimation.comparativecohortanalysis.specification.ComparativeCohortAnalysisSettings;
/**
*
* @author asena5
*/
// Polymorphic JSON handling: the concrete settings subtype is chosen by the
// analysis-type name; "estimationType" is excluded from (de)serialization.
@JsonSubTypes({
    @JsonSubTypes.Type(value = ComparativeCohortAnalysisSettings.class, name = EstimationAnalysisConst.COMPARATIVE_COHORT_ANALYSIS)
})
@JsonIgnoreProperties({"estimationType"})
public abstract class EstimationAnalysisSettingsImpl implements EstimationAnalysisSettings {
    /**
     * The type of estimation analysis these settings describe.
     *
     * @return the estimation type
     */
    @Override
    public abstract EstimationTypeEnum getEstimationType();
    /**
     * The concrete, type-specific analysis settings.
     *
     * @return the analysis specification
     */
    @Override
    public abstract Settings getAnalysisSpecification();
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/estimation/specification/NegativeControlExposureCohortExpressionImpl.java | src/main/java/org/ohdsi/webapi/estimation/specification/NegativeControlExposureCohortExpressionImpl.java | package org.ohdsi.webapi.estimation.specification;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import org.ohdsi.analysis.estimation.design.NegativeControlExposureCohortExpression;
/**
 * The expression that defines the criteria for inclusion and duration of time
 * for cohorts intended for use as negative control exposures. This model is
 * still under design and is a placeholder for now.
 */
@JsonIgnoreProperties(ignoreUnknown=true)
public class NegativeControlExposureCohortExpressionImpl implements NegativeControlExposureCohortExpression {
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/statistic/controller/StatisticController.java | src/main/java/org/ohdsi/webapi/statistic/controller/StatisticController.java | package org.ohdsi.webapi.statistic.controller;
import com.opencsv.CSVWriter;
import org.ohdsi.webapi.statistic.dto.AccessTrendDto;
import org.ohdsi.webapi.statistic.dto.AccessTrendsDto;
import org.ohdsi.webapi.statistic.dto.EndpointDto;
import org.ohdsi.webapi.statistic.dto.SourceExecutionDto;
import org.ohdsi.webapi.statistic.dto.SourceExecutionsDto;
import org.ohdsi.webapi.statistic.service.StatisticService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import javax.ws.rs.Consumes;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.ByteArrayOutputStream;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@Controller
@Path("/statistic/")
/**
 * REST endpoints that report WebAPI usage statistics mined from the
 * audit-trail log: analysis executions per source and endpoint access trends.
 * Both endpoints can return JSON or a CSV attachment and require the audit
 * trail feature ({@code audit.trail.enabled}) to be switched on.
 */
public class StatisticController {

    private static final Logger log = LoggerFactory.getLogger(StatisticController.class);

    // Request dates arrive as yyyy-MM-dd. DateTimeFormatter is immutable and
    // thread-safe, so one shared instance replaces the per-request creation.
    private static final DateTimeFormatter REQUEST_DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    // Assigned once via constructor injection; made final for immutability.
    private final StatisticService service;

    @Value("${audit.trail.enabled}")
    private boolean auditTrailEnabled;

    /** Supported payload formats for statistic responses. */
    public enum ResponseFormat {
        CSV, JSON
    }

    // CSV header rows. Collections.singletonList replaces the former
    // double-brace ArrayList initialization, which created a needless
    // anonymous subclass per constant.
    private static final List<String[]> EXECUTION_STATISTICS_CSV_RESULT_HEADER =
            Collections.singletonList(new String[]{"Date", "Source", "Execution Type"});

    private static final List<String[]> EXECUTION_STATISTICS_CSV_RESULT_HEADER_WITH_USER_ID =
            Collections.singletonList(new String[]{"Date", "Source", "Execution Type", "User ID"});

    private static final List<String[]> ACCESS_TRENDS_CSV_RESULT_HEADER =
            Collections.singletonList(new String[]{"Date", "Endpoint"});

    private static final List<String[]> ACCESS_TRENDS_CSV_RESULT_HEADER_WITH_USER_ID =
            Collections.singletonList(new String[]{"Date", "Endpoint", "User ID"});

    @Autowired
    public StatisticController(StatisticService service) {
        this.service = service;
    }

    /**
     * Returns execution statistics for a date range and optional source key.
     *
     * @param executionStatisticsRequest filter settings for statistics
     * @return a CSV attachment or a JSON body, per the requested format
     * @throws InternalServerErrorException if the audit trail is disabled
     */
    @POST
    @Path("/executions")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public Response executionStatistics(ExecutionStatisticsRequest executionStatisticsRequest) {
        requireAuditTrailEnabled();
        boolean showUserInformation = executionStatisticsRequest.isShowUserInformation();
        SourceExecutionsDto sourceExecutions = service.getSourceExecutions(
                LocalDate.parse(executionStatisticsRequest.getStartDate(), REQUEST_DATE_FORMAT),
                LocalDate.parse(executionStatisticsRequest.getEndDate(), REQUEST_DATE_FORMAT),
                executionStatisticsRequest.getSourceKey(), showUserInformation);

        if (ResponseFormat.CSV.equals(executionStatisticsRequest.getResponseFormat())) {
            return prepareExecutionResultResponse(sourceExecutions.getExecutions(), "execution_statistics.zip", showUserInformation);
        } else {
            return Response.ok(sourceExecutions).build();
        }
    }

    /**
     * Returns access-trend statistics for a date range and endpoint patterns.
     *
     * @param accessTrendsStatisticsRequest filter settings for statistics
     * @return a CSV attachment or a JSON body, per the requested format
     * @throws InternalServerErrorException if the audit trail is disabled
     */
    @POST
    @Path("/accesstrends")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public Response accessStatistics(AccessTrendsStatisticsRequest accessTrendsStatisticsRequest) {
        requireAuditTrailEnabled();
        boolean showUserInformation = accessTrendsStatisticsRequest.isShowUserInformation();
        AccessTrendsDto trends = service.getAccessTrends(
                LocalDate.parse(accessTrendsStatisticsRequest.getStartDate(), REQUEST_DATE_FORMAT),
                LocalDate.parse(accessTrendsStatisticsRequest.getEndDate(), REQUEST_DATE_FORMAT),
                accessTrendsStatisticsRequest.getEndpoints(), showUserInformation);

        if (ResponseFormat.CSV.equals(accessTrendsStatisticsRequest.getResponseFormat())) {
            return prepareAccessTrendsResponse(trends.getTrends(), "execution_trends.zip", showUserInformation);
        } else {
            return Response.ok(trends).build();
        }
    }

    // Both endpoints are backed by the audit-trail log; fail fast when the
    // feature is off (shared guard extracted from the two endpoint methods).
    private void requireAuditTrailEnabled() {
        if (!auditTrailEnabled) {
            throw new InternalServerErrorException("Audit Trail functionality should be enabled (audit.trail.enabled) to serve this endpoint");
        }
    }

    // Flattens executions into CSV rows, optionally including the user id.
    private Response prepareExecutionResultResponse(List<SourceExecutionDto> executions, String filename, boolean showUserInformation) {
        List<String[]> data = executions.stream()
                .map(execution -> showUserInformation
                        ? new String[]{execution.getExecutionDate(), execution.getSourceName(), execution.getExecutionName(), execution.getUserId()}
                        : new String[]{execution.getExecutionDate(), execution.getSourceName(), execution.getExecutionName()}
                )
                .collect(Collectors.toList());
        return prepareResponse(data, filename, showUserInformation ? EXECUTION_STATISTICS_CSV_RESULT_HEADER_WITH_USER_ID : EXECUTION_STATISTICS_CSV_RESULT_HEADER);
    }

    // Flattens access trends into CSV rows, optionally including the user id.
    private Response prepareAccessTrendsResponse(List<AccessTrendDto> trends, String filename, boolean showUserInformation) {
        List<String[]> data = trends.stream()
                .map(trend -> showUserInformation
                        ? new String[]{trend.getExecutionDate().toString(), trend.getEndpointName(), trend.getUserID()}
                        : new String[]{trend.getExecutionDate().toString(), trend.getEndpointName()}
                )
                .collect(Collectors.toList());
        return prepareResponse(data, filename, showUserInformation ? ACCESS_TRENDS_CSV_RESULT_HEADER_WITH_USER_ID : ACCESS_TRENDS_CSV_RESULT_HEADER);
    }

    /**
     * Serializes header + data rows to CSV and wraps them in an attachment
     * response.
     * NOTE(review): the filenames passed in end in ".zip" although the payload
     * is plain CSV — confirm whether the extension is intentional.
     */
    private Response prepareResponse(List<String[]> data, String filename, List<String[]> header) {
        try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
             StringWriter sw = new StringWriter();
             CSVWriter csvWriter = new CSVWriter(sw, ',', CSVWriter.DEFAULT_QUOTE_CHARACTER, CSVWriter.DEFAULT_ESCAPE_CHARACTER)) {
            csvWriter.writeAll(header);
            csvWriter.writeAll(data);
            csvWriter.flush();
            // Explicit charset: the previous default-charset getBytes() made
            // the download's encoding platform-dependent.
            baos.write(sw.getBuffer().toString().getBytes(StandardCharsets.UTF_8));
            return Response
                    .ok(baos)
                    .type(MediaType.APPLICATION_OCTET_STREAM)
                    .header("Content-Disposition", String.format("attachment; filename=\"%s\"", filename))
                    .build();
        } catch (Exception ex) {
            // Log the cause too; previously the stack trace was lost from the log.
            log.error("An error occurred while building a response", ex);
            throw new RuntimeException(ex);
        }
    }

    /** Request body for {@link #executionStatistics}. */
    public static final class ExecutionStatisticsRequest {
        // Format - yyyy-MM-dd
        String startDate;
        // Format - yyyy-MM-dd
        String endDate;
        String sourceKey;
        ResponseFormat responseFormat;
        boolean showUserInformation;

        public String getStartDate() {
            return startDate;
        }

        public void setStartDate(String startDate) {
            this.startDate = startDate;
        }

        public String getEndDate() {
            return endDate;
        }

        public void setEndDate(String endDate) {
            this.endDate = endDate;
        }

        public String getSourceKey() {
            return sourceKey;
        }

        public void setSourceKey(String sourceKey) {
            this.sourceKey = sourceKey;
        }

        public ResponseFormat getResponseFormat() {
            return responseFormat;
        }

        public void setResponseFormat(ResponseFormat responseFormat) {
            this.responseFormat = responseFormat;
        }

        public boolean isShowUserInformation() {
            return showUserInformation;
        }

        public void setShowUserInformation(boolean showUserInformation) {
            this.showUserInformation = showUserInformation;
        }
    }

    /** Request body for {@link #accessStatistics}. */
    public static final class AccessTrendsStatisticsRequest {
        // Format - yyyy-MM-dd
        String startDate;
        // Format - yyyy-MM-dd
        String endDate;
        // Key - method (POST, GET)
        // Value - endpoint ("{}" can be used as a placeholder, will be converted to ".*" in regular expression)
        List<EndpointDto> endpoints;
        ResponseFormat responseFormat;
        boolean showUserInformation;

        public String getStartDate() {
            return startDate;
        }

        public void setStartDate(String startDate) {
            this.startDate = startDate;
        }

        public String getEndDate() {
            return endDate;
        }

        public void setEndDate(String endDate) {
            this.endDate = endDate;
        }

        public List<EndpointDto> getEndpoints() {
            return endpoints;
        }

        public void setEndpoints(List<EndpointDto> endpoints) {
            this.endpoints = endpoints;
        }

        public ResponseFormat getResponseFormat() {
            return responseFormat;
        }

        public void setResponseFormat(ResponseFormat responseFormat) {
            this.responseFormat = responseFormat;
        }

        public boolean isShowUserInformation() {
            return showUserInformation;
        }

        public void setShowUserInformation(boolean showUserInformation) {
            this.showUserInformation = showUserInformation;
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/statistic/dto/SourceExecutionDto.java | src/main/java/org/ohdsi/webapi/statistic/dto/SourceExecutionDto.java | package org.ohdsi.webapi.statistic.dto;
/**
 * Value object describing a single analysis execution against a source:
 * which source it ran on, what was executed, when, and by whom.
 */
public class SourceExecutionDto {

    private String sourceName;
    private String executionName;
    private String executionDate;
    private String userId;

    /**
     * @param sourceName    name of the source the execution ran against
     * @param executionName name/type of the execution
     * @param executionDate date string of the execution
     * @param userId        identifier of the user who triggered it
     */
    public SourceExecutionDto(String sourceName, String executionName, String executionDate, String userId) {
        this.sourceName = sourceName;
        this.executionName = executionName;
        this.executionDate = executionDate;
        this.userId = userId;
    }

    public String getSourceName() {
        return this.sourceName;
    }

    public void setSourceName(String sourceName) {
        this.sourceName = sourceName;
    }

    public String getExecutionName() {
        return this.executionName;
    }

    public void setExecutionName(String executionName) {
        this.executionName = executionName;
    }

    public String getExecutionDate() {
        return this.executionDate;
    }

    public void setExecutionDate(String executionDate) {
        this.executionDate = executionDate;
    }

    public String getUserId() {
        return this.userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/statistic/dto/AccessTrendDto.java | src/main/java/org/ohdsi/webapi/statistic/dto/AccessTrendDto.java | package org.ohdsi.webapi.statistic.dto;
/**
 * Value object describing one endpoint access event: the endpoint hit, the
 * date of the hit, and the user who made it.
 */
public class AccessTrendDto {

    private String endpointName;
    private String executionDate;
    private String userID;

    /**
     * @param endpointName  name of the accessed endpoint
     * @param executionDate date string of the access
     * @param userID        identifier of the accessing user
     */
    public AccessTrendDto(String endpointName, String executionDate, String userID) {
        this.endpointName = endpointName;
        this.executionDate = executionDate;
        this.userID = userID;
    }

    public String getEndpointName() {
        return this.endpointName;
    }

    public void setEndpointName(String endpointName) {
        this.endpointName = endpointName;
    }

    public String getExecutionDate() {
        return this.executionDate;
    }

    public void setExecutionDate(String executionDate) {
        this.executionDate = executionDate;
    }

    public String getUserID() {
        return this.userID;
    }

    public void setUserID(String userID) {
        this.userID = userID;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/statistic/dto/AccessTrendsDto.java | src/main/java/org/ohdsi/webapi/statistic/dto/AccessTrendsDto.java | package org.ohdsi.webapi.statistic.dto;
import java.util.ArrayList;
import java.util.List;
/**
 * Wrapper DTO carrying the list of access-trend entries returned by the
 * statistics endpoints.
 */
public class AccessTrendsDto {

    // Assigned exactly once in the constructor; the former field initializer
    // (an empty ArrayList) was always overwritten, so the field is now final
    // and the dead initializer is removed. Behavior is unchanged.
    private final List<AccessTrendDto> trends;

    /**
     * @param trends the access-trend entries to expose; stored as-is
     *               (no defensive copy), matching previous behavior
     */
    public AccessTrendsDto(List<AccessTrendDto> trends) {
        this.trends = trends;
    }

    /** @return the access-trend entries supplied at construction time */
    public List<AccessTrendDto> getTrends() {
        return trends;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/statistic/dto/SourceExecutionsDto.java | src/main/java/org/ohdsi/webapi/statistic/dto/SourceExecutionsDto.java | package org.ohdsi.webapi.statistic.dto;
import java.util.ArrayList;
import java.util.List;
/**
 * Wrapper DTO carrying the list of source-execution entries returned by the
 * statistics endpoints.
 */
public class SourceExecutionsDto {

    // Assigned exactly once in the constructor; the former field initializer
    // (an empty ArrayList) was always overwritten, so the field is now final
    // and the dead initializer is removed. Behavior is unchanged.
    private final List<SourceExecutionDto> executions;

    /**
     * @param executions the execution entries to expose; stored as-is
     *                   (no defensive copy), matching previous behavior
     */
    public SourceExecutionsDto(List<SourceExecutionDto> executions) {
        this.executions = executions;
    }

    /** @return the execution entries supplied at construction time */
    public List<SourceExecutionDto> getExecutions() {
        return executions;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/statistic/dto/EndpointDto.java | src/main/java/org/ohdsi/webapi/statistic/dto/EndpointDto.java | package org.ohdsi.webapi.statistic.dto;
/**
 * Describes one endpoint filter for access-trend queries: the HTTP method,
 * a URL pattern ("{}" acts as a wildcard placeholder), and an optional
 * user identifier.
 */
public class EndpointDto {

    String method;
    String urlPattern;
    String userId;

    public String getMethod() {
        return this.method;
    }

    public void setMethod(String method) {
        this.method = method;
    }

    public String getUrlPattern() {
        return this.urlPattern;
    }

    public void setUrlPattern(String urlPattern) {
        this.urlPattern = urlPattern;
    }

    public String getUserId() {
        return this.userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/statistic/service/StatisticService.java | src/main/java/org/ohdsi/webapi/statistic/service/StatisticService.java | package org.ohdsi.webapi.statistic.service;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.ohdsi.webapi.statistic.dto.AccessTrendDto;
import org.ohdsi.webapi.statistic.dto.AccessTrendsDto;
import org.ohdsi.webapi.statistic.dto.EndpointDto;
import org.ohdsi.webapi.statistic.dto.SourceExecutionDto;
import org.ohdsi.webapi.statistic.dto.SourceExecutionsDto;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.ZoneId;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@Service
public class StatisticService {

    protected final Logger LOG = LoggerFactory.getLogger(getClass());

    // Absolute path of the "current day" audit log; its parent folder is scanned for rotated logs.
    @Value("${audit.trail.log.file}")
    private String absoluteLogFileName = "/tmp/atlas/audit/audit.log";
    private String logFileName;

    // Logback-style rollover pattern, e.g. "audit-%d{yyyy-MM-dd}-%i.log".
    @Value("${audit.trail.log.file.pattern}")
    private String absoluteLogFileNamePattern = "/tmp/atlas/audit/audit-%d{yyyy-MM-dd}-%i.log";
    private String logFileNamePattern;

    // Format and position of the date token inside rotated log file names, derived in init().
    // NOTE(review): SimpleDateFormat is not thread-safe and this singleton may serve concurrent
    // requests — confirm whether getFileDate() can race, or migrate to DateTimeFormatter.
    private SimpleDateFormat logFileDateFormat;
    private int logFileDateStart;
    private int logFileDateEnd;

    // Some execution can have duplicate logs with different parameters.
    // Duplicate log entries can exist because sometimes controller methods are called from other
    // controller methods. These regular expressions let us choose only the needed log entries.
    private static final Pattern COHORT_GENERATION_REGEXP =
            Pattern.compile("^.*(\\d{4}-\\d{2}-\\d{2})T\\d{2}:\\d{2}:\\d{2}.*-\\s-\\s-\\s([\\w-]+)\\s.*GET\\s/WebAPI/cohortdefinition/\\d+/generate/(.+)\\s-\\s.*status::String,startDate::Date,endDate::Date.*$");
    private static final Pattern CHARACTERIZATION_GENERATION_REGEXP =
            Pattern.compile("^.*(\\d{4}-\\d{2}-\\d{2})T\\d{2}:\\d{2}:\\d{2}.*-\\s-\\s-\\s([\\w-]+)\\s.*POST\\s/WebAPI/cohort-characterization/\\d+/generation/(.+)\\s-\\s.*status::String,startDate::Date,endDate::Date.*$");
    private static final Pattern PATHWAY_GENERATION_REGEXP =
            Pattern.compile("^.*(\\d{4}-\\d{2}-\\d{2})T\\d{2}:\\d{2}:\\d{2}.*-\\s-\\s-\\s([\\w-]+)\\s.*POST\\s/WebAPI/pathway-analysis/\\d+/generation/(.+)\\s-\\s.*status::String,startDate::Date,endDate::Date.*$");
    private static final Pattern IR_GENERATION_REGEXP =
            Pattern.compile("^.*(\\d{4}-\\d{2}-\\d{2})T\\d{2}:\\d{2}:\\d{2}.*-\\s-\\s-\\s([\\w-]+)\\s.*GET\\s/WebAPI/ir/\\d+/execute/(.+)\\s-\\s.*status::String,startDate::Date,endDate::Date.*$");
    private static final Pattern PLE_GENERATION_REGEXP =
            Pattern.compile("^.*(\\d{4}-\\d{2}-\\d{2})T\\d{2}:\\d{2}:\\d{2}.*-\\s-\\s-\\s([\\w-]+)\\s.*POST\\s/WebAPI/estimation/\\d+/generation/(.+)\\s-\\s.*status::String,startDate::Date,endDate::Date.*$");
    private static final Pattern PLP_GENERATION_REGEXP =
            Pattern.compile("^.*(\\d{4}-\\d{2}-\\d{2})T\\d{2}:\\d{2}:\\d{2}.*-\\s-\\s-\\s([\\w-]+)\\s.*POST\\s/WebAPI/prediction/\\d+/generation/(.+)\\s-\\s.*status::String,startDate::Date,endDate::Date.*$");

    // Template for ad-hoc endpoint matching; method and URL are substituted per request.
    private static final String ENDPOINT_REGEXP =
            "^.*(\\d{4}-\\d{2}-\\d{2})T(\\d{2}:\\d{2}:\\d{2}).*-\\s-\\s-\\s([\\w-]+)\\s.*-\\s({METHOD_PLACEHOLDER}\\s.*{ENDPOINT_PLACEHOLDER})\\s-.*$";

    private static final String COHORT_GENERATION_NAME = "Cohort Generation";
    private static final String CHARACTERIZATION_GENERATION_NAME = "Characterization Generation";
    private static final String PATHWAY_GENERATION_NAME = "Pathway Generation";
    private static final String IR_GENERATION_NAME = "Incidence Rates Generation";
    private static final String PLE_GENERATION_NAME = "Estimation Generation";
    private static final String PLP_GENERATION_NAME = "Prediction Generation";

    // Human-readable execution name -> log-line pattern.
    private static final Map<String, Pattern> patternMap = new HashMap<>();
    static {
        patternMap.put(COHORT_GENERATION_NAME, COHORT_GENERATION_REGEXP);
        patternMap.put(CHARACTERIZATION_GENERATION_NAME, CHARACTERIZATION_GENERATION_REGEXP);
        patternMap.put(PATHWAY_GENERATION_NAME, PATHWAY_GENERATION_REGEXP);
        patternMap.put(IR_GENERATION_NAME, IR_GENERATION_REGEXP);
        patternMap.put(PLE_GENERATION_NAME, PLE_GENERATION_REGEXP);
        patternMap.put(PLP_GENERATION_NAME, PLP_GENERATION_REGEXP);
    }

    public StatisticService() {
        // Derive defaults immediately so direct instantiation (e.g. in tests) keeps working.
        init();
    }

    /**
     * (Re)derives the log file name, rollover pattern and the position/format of the embedded
     * date token. Annotated with @PostConstruct because @Value field injection happens AFTER
     * the constructor runs: deriving these values only in the constructor would always use the
     * hard-coded defaults and silently ignore the configured properties.
     */
    @PostConstruct
    void init() {
        logFileName = new File(absoluteLogFileName).getName();
        logFileNamePattern = new File(absoluteLogFileNamePattern).getName();
        // Pattern contains "%d{yyyy-MM-dd}". The "%d{" prefix (3 chars) is not present in real
        // log file names, so the date in an actual name starts 3 positions before the "{".
        int placeHolderPrefixLength = 3;
        logFileDateStart = logFileNamePattern.indexOf("{") - placeHolderPrefixLength + 1;
        logFileDateEnd = logFileNamePattern.indexOf("}") - placeHolderPrefixLength;
        String dateString = logFileNamePattern.substring(logFileDateStart + placeHolderPrefixLength,
                logFileDateEnd + placeHolderPrefixLength);
        logFileDateFormat = new SimpleDateFormat(dateString);
    }

    /**
     * Collects generation/execution events from the audit logs in the given date range.
     *
     * @param startDate inclusive lower bound, or null for unbounded
     * @param endDate inclusive upper bound, or null for unbounded
     * @param sourceKey when non-null, only executions against this source are returned
     * @param showUserInformation when false, the user login is omitted from results
     */
    public SourceExecutionsDto getSourceExecutions(LocalDate startDate, LocalDate endDate, String sourceKey, boolean showUserInformation) {
        Set<Path> paths = getLogPaths(startDate, endDate);
        List<SourceExecutionDto> executions = paths.stream()
                .flatMap(path -> extractSourceExecutions(path, sourceKey, showUserInformation).stream())
                .collect(Collectors.toList());
        return new SourceExecutionsDto(executions);
    }

    /**
     * Collects endpoint-access events matching the requested method/URL patterns from the
     * audit logs in the given date range.
     */
    public AccessTrendsDto getAccessTrends(LocalDate startDate, LocalDate endDate, List<EndpointDto> endpoints, boolean showUserInformation) {
        Set<Path> paths = getLogPaths(startDate, endDate);
        List<AccessTrendDto> trends = paths.stream()
                .flatMap(path -> extractAccessTrends(path, endpoints, showUserInformation).stream())
                .collect(Collectors.toList());
        return new AccessTrendsDto(trends);
    }

    // Scans one log file for generation/execution entries.
    private List<SourceExecutionDto> extractSourceExecutions(Path path, String sourceKey, boolean showUserInformation) {
        try (Stream<String> stream = Files.lines(path)) {
            return stream
                    .map(str -> getMatchedExecution(str, sourceKey, showUserInformation))
                    .filter(Optional::isPresent)
                    .map(Optional::get)
                    .collect(Collectors.toList());
        } catch (IOException e) {
            // Pass the throwable as the trailing argument so SLF4J logs the stack trace
            // (a second "{}" placeholder would swallow it as a toString()).
            LOG.error("Error parsing log file {}", path.getFileName(), e);
            throw new RuntimeException(e);
        }
    }

    // Scans one log file for accesses to the requested endpoints.
    private List<AccessTrendDto> extractAccessTrends(Path path, List<EndpointDto> endpoints, boolean showUserInformation) {
        List<Pattern> patterns = endpoints.stream()
                .map(endpointPair -> {
                    String method = endpointPair.getMethod();
                    // "{}" in the URL pattern is a wildcard path segment.
                    String endpoint = endpointPair.getUrlPattern().replaceAll("\\{\\}", ".*");
                    String regexpStr = ENDPOINT_REGEXP.replace("{METHOD_PLACEHOLDER}", method);
                    regexpStr = regexpStr.replace("{ENDPOINT_PLACEHOLDER}", endpoint);
                    return Pattern.compile(regexpStr);
                })
                .collect(Collectors.toList());
        try (Stream<String> stream = Files.lines(path)) {
            return stream
                    .map(str -> {
                        // groups: 1 = date, 3 = user login, 4 = "METHOD url"
                        return patterns.stream()
                                .map(pattern -> pattern.matcher(str))
                                .filter(matcher -> matcher.matches())
                                .map(matcher -> new AccessTrendDto(matcher.group(4), matcher.group(1), showUserInformation ? matcher.group(3) : null))
                                .findFirst();
                    })
                    .filter(Optional::isPresent)
                    .map(Optional::get)
                    .collect(Collectors.toList());
        } catch (IOException e) {
            LOG.error("Error parsing log file {}", path.getFileName(), e);
            throw new RuntimeException(e);
        }
    }

    // Tries every known generation pattern against a single log line.
    // groups: 1 = date, 2 = user login, 3 = source key.
    private Optional<SourceExecutionDto> getMatchedExecution(String str, String sourceKey, boolean showUserInformation) {
        return patternMap.entrySet().stream()
                .map(entry -> new ImmutablePair<>(entry.getKey(), entry.getValue().matcher(str)))
                .filter(pair -> pair.getValue().matches())
                .filter(pair -> sourceKey == null || sourceKey.equals(pair.getValue().group(3)))
                .map(pair -> new SourceExecutionDto(pair.getValue().group(3), pair.getKey(), pair.getValue().group(1), showUserInformation ? pair.getValue().group(2) : null))
                .findFirst();
    }

    // Lists all audit log files in the configured folder that fall into the date range.
    private Set<Path> getLogPaths(LocalDate startDate, LocalDate endDate) {
        String folderPath = new File(absoluteLogFileName).getParentFile().getAbsolutePath();
        try (Stream<Path> stream = Files.list(Paths.get(folderPath))) {
            return stream
                    .filter(file -> !Files.isDirectory(file))
                    .filter(this::isValidLogFile)
                    .filter(file -> isLogInDateRange(file, startDate, endDate))
                    .map(Path::toAbsolutePath)
                    .collect(Collectors.toSet());
        } catch (IOException e) {
            LOG.error("Error getting list of log files", e);
            throw new RuntimeException(e);
        }
    }

    private boolean isValidLogFile(Path path) {
        return path.getFileName().toString().endsWith(".log");
    }

    // Inclusive on both bounds; a null bound is unbounded.
    private boolean isLogInDateRange(Path path, LocalDate startDate, LocalDate endDate) {
        if (startDate == null && endDate == null) {
            return true;
        }
        LocalDate logDate = getFileDate(path.getFileName());
        if ((startDate != null && logDate.isBefore(startDate))
                || (endDate != null && logDate.isAfter(endDate))) {
            return false;
        }
        return true;
    }

    // Extracts the rollover date from a rotated log file name; the active log (and any
    // unparsable name) is treated as "today".
    private LocalDate getFileDate(Path path) {
        String fileName = path.toString();
        if (logFileName.equals(fileName)) {
            return LocalDate.now();
        }
        try {
            String dateStr = fileName.substring(logFileDateStart, logFileDateEnd);
            return logFileDateFormat.parse(dateStr).toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
        } catch (ParseException | IndexOutOfBoundsException e) {
            // If we cannot check the date of a file, then assume that it is a file for the current date
            return LocalDate.now();
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/NotificationService.java | src/main/java/org/ohdsi/webapi/job/NotificationService.java | package org.ohdsi.webapi.job;
import org.springframework.batch.core.BatchStatus;
import java.util.Date;
import java.util.List;
/**
 * Supplies the data backing the notifications endpoint: recent job executions and the
 * per-user "last viewed" timestamp.
 */
public interface NotificationService {

    /**
     * Returns the most recent job executions, excluding the given statuses.
     *
     * @param hideStatuses batch statuses to omit from the result
     */
    List<JobExecutionInfo> findLastJobs(List<BatchStatus> hideStatuses);

    /** Returns the most recent cache-refresh ("warming") job executions. */
    List<JobExecutionInfo> findRefreshCacheLastJobs();

    /**
     * @return the time the current user last viewed notifications; may be null
     *         (e.g. when security is disabled and no user is available)
     */
    Date getLastViewedTime() throws Exception;

    /** Stores the time the current user last viewed notifications. */
    void setLastViewedTime(Date stamp) throws Exception;
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/JobUtils.java | src/main/java/org/ohdsi/webapi/job/JobUtils.java | package org.ohdsi.webapi.job;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
import org.ohdsi.webapi.Constants;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameter.ParameterType;
import org.springframework.batch.core.JobParameters;
/**
 * Static helpers that convert Spring Batch job/instance/execution objects (and raw
 * JDBC result sets) into the REST resource representations returned by the job endpoints.
 */
public final class JobUtils {

    // Job parameters that must never be exposed through the REST layer.
    // Arrays.asList was used to provide easy extending ability in future.
    private static final List<String> PROTECTED_PARAMS = Arrays.asList(Constants.Params.UPDATE_PASSWORD);

    // Utility class: prevent instantiation.
    private JobUtils() {
    }

    /** Converts a batch {@link JobInstance} to its REST resource (id + job name). */
    public static JobInstanceResource toJobInstanceResource(final JobInstance jobInstance) {
        final JobInstanceResource job = new JobInstanceResource(jobInstance.getId());
        job.setName(jobInstance.getJobName());
        return job;
    }

    /**
     * Converts a batch {@link JobExecution} to its REST resource, copying status, timing,
     * exit code, and all job parameters except the protected ones.
     */
    public static JobExecutionResource toJobExecutionResource(final JobExecution jobExecution) {
        final JobExecutionResource execution = new JobExecutionResource(
                toJobInstanceResource(jobExecution.getJobInstance()), jobExecution.getId());
        execution.setStatus(jobExecution.getStatus().name());
        execution.setStartDate(jobExecution.getStartTime());
        execution.setEndDate(jobExecution.getEndTime());
        execution.setExitStatus(jobExecution.getExitStatus().getExitCode());
        JobParameters jobParams = jobExecution.getJobParameters();
        if (jobParams != null) {
            Map<String, JobParameter> params = jobParams.getParameters();
            if (params != null && !params.isEmpty()) {
                Map<String, Object> jobParametersResource = new HashMap<>();
                // Expose every parameter key except the protected ones.
                Set<String> keys = params.keySet().stream()
                        .filter(k -> !PROTECTED_PARAMS.contains(k))
                        .collect(Collectors.toSet());
                for (String key : keys) {
                    jobParametersResource.put(key, params.get(key).getValue());
                }
                execution.setJobParametersResource(jobParametersResource);
            }
        }
        return execution;
    }

    /**
     * Create List of JobExecutionResource objects containing job parameters.
     * <p>
     * See /resources/job/sql/jobExecutions.sql for ResultSet expectations. Each row carries
     * one job parameter; consecutive rows sharing an execution id are folded into a single
     * resource, so rows for one execution must be contiguous.
     *
     * @param rs result set positioned before the first row
     * @return one resource per execution, in result-set order
     * @throws SQLException if reading the result set fails
     */
    public static List<JobExecutionResource> toJobExecutionResource(final ResultSet rs) throws SQLException {
        //TODO order by executionId
        List<JobExecutionResource> jobs = new ArrayList<>();
        JobExecutionResource jobexec = null;
        Map<String, Object> map = new HashMap<>();
        while (rs.next()) {
            Long id = rs.getLong(1);
            if (jobexec != null) { // possible continuation of the previous execution's rows
                if (!jobexec.getExecutionId().equals(id)) {
                    // no continuation: flush the completed execution
                    jobexec.setJobParametersResource(map);
                    jobs.add(jobexec);
                    jobexec = null;
                    map = null;
                }
            }
            if (jobexec == null) {
                map = new HashMap<>();
                JobInstance jobInstance = new JobInstance(rs.getLong(10), rs.getString(11));
                // Parameters are accumulated row by row below, so none are passed here.
                JobExecution jobExecution = new JobExecution(jobInstance, null);
                jobExecution.setId(id);
                jobExecution.setStartTime(rs.getTimestamp(2));
                jobExecution.setEndTime(rs.getTimestamp(3));
                jobExecution.setStatus(BatchStatus.valueOf(rs.getString(4)));
                jobExecution.setExitStatus(new ExitStatus(rs.getString(5), rs.getString(6)));
                jobExecution.setCreateTime(rs.getTimestamp(7));
                jobExecution.setLastUpdated(rs.getTimestamp(8));
                jobExecution.setVersion(rs.getInt(9));
                jobexec = toJobExecutionResource(jobExecution);
            }
            // parameters start at column 12: key, type, then a typed value column per type
            String key = rs.getString(12);
            if (!PROTECTED_PARAMS.contains(key)) {
                ParameterType type = ParameterType.valueOf(rs.getString(13));
                JobParameter value = null;
                switch (type) {
                    case STRING: {
                        value = new JobParameter(rs.getString(14), rs.getString(18).equalsIgnoreCase("Y"));
                        break;
                    }
                    case LONG: {
                        value = new JobParameter(rs.getLong(16), rs.getString(18).equalsIgnoreCase("Y"));
                        break;
                    }
                    case DOUBLE: {
                        value = new JobParameter(rs.getDouble(17), rs.getString(18).equalsIgnoreCase("Y"));
                        break;
                    }
                    case DATE: {
                        value = new JobParameter(rs.getTimestamp(15), rs.getString(18).equalsIgnoreCase("Y"));
                        break;
                    }
                }
                // The switch covers every ParameterType constant, so value is never null here.
                map.put(key, value.getValue());
            }
        }
        // Flush the trailing execution, if any.
        if (jobexec != null && jobexec.getExecutionId() != null) {
            jobexec.setJobParametersResource(map);
            jobs.add(jobexec);
        }
        return jobs;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/JobTemplate.java | src/main/java/org/ohdsi/webapi/job/JobTemplate.java | package org.ohdsi.webapi.job;
import org.ohdsi.webapi.shiro.management.Security;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.*;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.step.tasklet.Tasklet;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import static org.ohdsi.webapi.Constants.Params.JOB_AUTHOR;
import static org.ohdsi.webapi.Constants.Params.JOB_START_TIME;
import static org.ohdsi.webapi.Constants.SYSTEM_USER;
import static org.ohdsi.webapi.Constants.WARM_CACHE;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
/**
 * Helper for launching Spring Batch jobs from REST controllers, translating batch
 * launch failures into JAX-RS {@link WebApplicationException}s with appropriate statuses.
 */
public class JobTemplate {
    private static final Logger log = LoggerFactory.getLogger(JobTemplate.class);

    private final JobLauncher jobLauncher;
    private final JobBuilderFactory jobBuilders;
    private final StepBuilderFactory stepBuilders;
    private final Security security;

    public JobTemplate(final JobLauncher jobLauncher, final JobBuilderFactory jobBuilders,
                       final StepBuilderFactory stepBuilders, final Security security) {
        this.jobLauncher = jobLauncher;
        this.jobBuilders = jobBuilders;
        this.stepBuilders = stepBuilders;
        this.security = security;
    }

    /**
     * Launches a pre-built job, stamping the start time and (when absent) the current
     * subject as the job author.
     *
     * @throws WebApplicationException 409 when the job is already running, 500 otherwise
     */
    public JobExecutionResource launch(final Job job, JobParameters jobParameters) throws WebApplicationException {
        JobExecution exec;
        try {
            JobParametersBuilder builder = new JobParametersBuilder(jobParameters);
            builder.addLong(JOB_START_TIME, System.currentTimeMillis());
            if (jobParameters.getString(JOB_AUTHOR) == null) {
                builder.addString(JOB_AUTHOR, security.getSubject());
            }
            jobParameters = builder.toJobParameters();
            exec = this.jobLauncher.run(job, jobParameters);
            if (log.isDebugEnabled()) {
                log.debug("JobExecution queued: {}", exec);
            }
        } catch (final JobExecutionAlreadyRunningException e) {
            throw new WebApplicationException(e, Response.status(Status.CONFLICT).entity(whitelist(e)).build());
        } catch (final Exception e) {
            throw new WebApplicationException(e, Response.status(Status.INTERNAL_SERVER_ERROR).entity(whitelist(e)).build());
        }
        return JobUtils.toJobExecutionResource(exec);
    }

    /**
     * Builds a single-step job around the given tasklet and launches it.
     *
     * @throws WebApplicationException 409 when the job is already running or already
     *                                 complete, 500 on any other failure
     */
    public JobExecutionResource launchTasklet(final String jobName, final String stepName, final Tasklet tasklet,
                                              JobParameters jobParameters) throws WebApplicationException {
        JobExecution exec;
        try {
            //TODO Consider JobParametersIncrementer
            jobParameters = new JobParametersBuilder(jobParameters)
                    .addLong(JOB_START_TIME, System.currentTimeMillis())
                    .addString(JOB_AUTHOR, getAuthorForTasklet(jobName))
                    .toJobParameters();
            //TODO Consider our own check (since adding unique JobParameter) to see if related-job is running and throw "already running"
            final Step step = this.stepBuilders.get(stepName).tasklet(tasklet).allowStartIfComplete(true).build();
            final Job job = this.jobBuilders.get(jobName).start(step).build();
            exec = this.jobLauncher.run(job, jobParameters);
        } catch (final JobExecutionAlreadyRunningException | JobInstanceAlreadyCompleteException e) {
            // Both conditions mean the requested work cannot be started again right now.
            throw new WebApplicationException(Response.status(Status.CONFLICT).entity(whitelist(e.getMessage())).build());
        } catch (final Exception e) {
            throw new WebApplicationException(Response.status(Status.INTERNAL_SERVER_ERROR).entity(whitelist(e.getMessage())).build());
        }
        return JobUtils.toJobExecutionResource(exec);
    }

    // System cache-warming jobs are attributed to the system user; everything else to the caller.
    private String getAuthorForTasklet(final String jobName) {
        return WARM_CACHE.equals(jobName) ? SYSTEM_USER : security.getSubject();
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/NotificationController.java | src/main/java/org/ohdsi/webapi/job/NotificationController.java | package org.ohdsi.webapi.job;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.BatchStatus;
import org.springframework.core.convert.support.GenericConversionService;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
/**
 * REST Services related to working with the system notifications
 *
 * @summary Notifications
 */
@Path("/notifications")
@Controller
@Transactional
public class NotificationController {
    private static final Logger log = LoggerFactory.getLogger(NotificationController.class);

    private final NotificationService service;
    private final GenericConversionService conversionService;

    NotificationController(final NotificationService service, GenericConversionService conversionService) {
        this.service = service;
        this.conversionService = conversionService;
    }

    /**
     * Get the list of notifications
     *
     * @summary Get all notifications
     * @param hideStatuses Used to filter statuses - passed as a comma-delimited list;
     *                     unknown values are logged and skipped
     * @param refreshJobs  Boolean - when true, only cache-refresh ("warming") job
     *                     executions are returned
     * @return the most recent job executions as REST resources
     */
    @GET
    @Path("/")
    @Produces(MediaType.APPLICATION_JSON)
    @Transactional(readOnly = true)
    public List<JobExecutionResource> list(
            @QueryParam("hide_statuses") String hideStatuses,
            @DefaultValue("FALSE") @QueryParam("refreshJobs") Boolean refreshJobs) {
        List<BatchStatus> statuses = new ArrayList<>();
        if (StringUtils.isNotEmpty(hideStatuses)) {
            for (String status : hideStatuses.split(",")) {
                // Be lenient about surrounding whitespace ("A, B") and empty tokens ("A,,B").
                final String token = status.trim();
                if (token.isEmpty()) {
                    continue;
                }
                try {
                    statuses.add(BatchStatus.valueOf(token));
                } catch (IllegalArgumentException e) {
                    log.warn("Invalid argument passed as batch status: {}", token);
                }
            }
        }
        List<JobExecutionInfo> executionInfos;
        if (refreshJobs) {
            executionInfos = service.findRefreshCacheLastJobs();
        } else {
            executionInfos = service.findLastJobs(statuses);
        }
        return executionInfos.stream().map(this::toDTO).collect(Collectors.toList());
    }

    /**
     * Gets the date when notifications were last viewed
     *
     * @summary Get notification last viewed date
     * @return The date when notifications were last viewed
     */
    @GET
    @Path("/viewed")
    @Produces(MediaType.APPLICATION_JSON)
    @Transactional(readOnly = true)
    public Date getLastViewedTime() {
        try {
            return service.getLastViewedTime();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Sets the date when notifications were last viewed
     *
     * @summary Set notification last viewed date
     * @param stamp the new "last viewed" timestamp
     */
    @POST
    @Path("/viewed")
    @Produces(MediaType.APPLICATION_JSON)
    public void setLastViewedTime(Date stamp) {
        try {
            service.setLastViewedTime(stamp);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Converts the internal execution info into its REST resource representation.
    private JobExecutionResource toDTO(JobExecutionInfo entity) {
        return conversionService.convert(entity, JobExecutionResource.class);
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/JobExecutionResource.java | src/main/java/org/ohdsi/webapi/job/JobExecutionResource.java | package org.ohdsi.webapi.job;
import java.util.Date;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * REST representation of a Spring Batch job execution: its instance, timing, status,
 * exit code, ownership category, and a filtered view of its job parameters.
 */
public class JobExecutionResource {

    @JsonProperty("status")
    private String status;
    @JsonProperty("startDate")
    private Date startDate;
    @JsonProperty("endDate")
    private Date endDate;
    @JsonProperty("exitStatus")
    private String exitStatus;
    @JsonProperty("executionId")
    private Long executionId;
    @JsonProperty("jobInstance")
    private JobInstanceResource jobInstanceResource;
    @JsonProperty("jobParameters")
    private Map<String, Object> jobParametersResource;
    @JsonProperty("ownerType")
    private JobOwnerType ownerType;

    public JobExecutionResource() {
        //needed for json deserialization
    }

    public JobExecutionResource(final JobInstanceResource jobInstanceResource, final Long executionId) {
        this.jobInstanceResource = jobInstanceResource;
        this.executionId = executionId;
    }

    /**
     * @return the status
     */
    public String getStatus() {
        return this.status;
    }

    /**
     * @param status the status to set
     */
    public void setStatus(final String status) {
        this.status = status;
    }

    /**
     * @return the startDate
     */
    public Date getStartDate() {
        return this.startDate;
    }

    /**
     * @param startDate the startDate to set
     */
    public void setStartDate(final Date startDate) {
        this.startDate = startDate;
    }

    /**
     * @return the endDate
     */
    public Date getEndDate() {
        return this.endDate;
    }

    /**
     * @param endDate the endDate to set
     */
    public void setEndDate(final Date endDate) {
        this.endDate = endDate;
    }

    /**
     * @return the exitStatus
     */
    public String getExitStatus() {
        return this.exitStatus;
    }

    /**
     * @param exitStatus the exitStatus to set
     */
    public void setExitStatus(final String exitStatus) {
        this.exitStatus = exitStatus;
    }

    /**
     * @return the executionId
     */
    public Long getExecutionId() {
        return this.executionId;
    }

    /**
     * @return the jobInstanceResource
     */
    public JobInstanceResource getJobInstanceResource() {
        return this.jobInstanceResource;
    }

    /**
     * Added for bean symmetry with {@link #setJobParametersResource(Map)}; every other
     * property already exposes a getter.
     *
     * @return the job parameters exposed to clients
     */
    public Map<String, Object> getJobParametersResource() {
        return this.jobParametersResource;
    }

    /**
     * @param map the job parameters to expose to clients
     */
    public void setJobParametersResource(Map<String, Object> map) {
        this.jobParametersResource = map;
    }

    /**
     * @return the ownership category (all jobs vs. current user's jobs)
     */
    public JobOwnerType getOwnerType() {
        return ownerType;
    }

    public void setOwnerType(JobOwnerType ownerType) {
        this.ownerType = ownerType;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/JobOwnerType.java | src/main/java/org/ohdsi/webapi/job/JobOwnerType.java | package org.ohdsi.webapi.job;
/**
 * Ownership category of a job execution shown in the notifications list.
 */
public enum JobOwnerType {
    /** Execution shown in the list covering all users' jobs. */
    ALL_JOB,
    /** Execution started by the current user. */
    USER_JOB
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/JobExecutionToDTOConverter.java | src/main/java/org/ohdsi/webapi/job/JobExecutionToDTOConverter.java | package org.ohdsi.webapi.job;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter;
import org.ohdsi.webapi.executionengine.controller.ScriptExecutionController;
import org.ohdsi.webapi.executionengine.job.RunExecutionEngineTasklet;
import org.ohdsi.webapi.executionengine.service.ScriptExecutionService;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.stereotype.Component;
import java.util.Map;
import java.util.stream.Collectors;
@Component
public class JobExecutionToDTOConverter extends BaseConversionServiceAwareConverter<JobExecutionInfo, JobExecutionResource> {

    private final ScriptExecutionService scriptExecutionService;
    private final JobExplorer jobExplorer;

    public JobExecutionToDTOConverter(ScriptExecutionService scriptExecutionService, JobExplorer jobExplorer) {
        this.scriptExecutionService = scriptExecutionService;
        this.jobExplorer = jobExplorer;
    }

    // Builds the resource shell (job instance info + execution id); the remaining fields
    // are filled in by convert().
    @Override
    protected JobExecutionResource createResultObject(JobExecutionInfo entity) {
        final JobExecution execution = entity.getJobExecution();
        final JobInstance instance = execution.getJobInstance();
        final JobInstanceResource instanceResource = new JobInstanceResource(instance.getInstanceId(), instance.getJobName());
        return new JobExecutionResource(instanceResource, entity.getJobExecution().getId());
    }

    /**
     * Converts an execution into its REST resource. Executions launched through the
     * execution engine (detected by the SCRIPT_TYPE job parameter) report the engine's
     * own status instead of the batch status; job parameters are filtered to the
     * allowed whitelist.
     */
    @Override
    public JobExecutionResource convert(JobExecutionInfo entity) {
        final JobExecutionResource result = createResultObject(entity);
        final JobExecution execution = entity.getJobExecution();
        final boolean isScriptExecution = execution.getJobParameters().getString(ScriptExecutionController.SCRIPT_TYPE) != null;
        if(isScriptExecution) {
            // NOTE(review): getJobId() returns the enclosing JobInstance id, while
            // JobExplorer.getJobExecution expects an execution id — confirm this lookup is
            // intentional (execution.getId() may have been meant).
            final JobExecution e = jobExplorer.getJobExecution(execution.getJobId());
            final Object executionId = e.getExecutionContext().get(RunExecutionEngineTasklet.SCRIPT_ID);
            // Fall back to the batch status when the engine script id is missing/not a Long.
            result.setStatus(executionId instanceof Long ? scriptExecutionService.getExecutionStatus((long) executionId) : execution.getStatus().name());
        } else {
            result.setStatus(execution.getStatus().name());
        }
        result.setExitStatus(execution.getExitStatus().getExitCode());
        result.setStartDate(execution.getStartTime());
        result.setEndDate(execution.getEndTime());
        // Expose only whitelisted parameters; null values are serialized as the string "null".
        result.setJobParametersResource(
                execution.getJobParameters().getParameters().entrySet()
                        .stream()
                        .filter(p -> Constants.ALLOWED_JOB_EXECUTION_PARAMETERS.contains(p.getKey()))
                        .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().getValue() != null ? e.getValue().getValue() : "null")));
        result.setOwnerType(entity.getOwnerType());
        return result;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/NotificationServiceImpl.java | src/main/java/org/ohdsi/webapi/job/NotificationServiceImpl.java | package org.ohdsi.webapi.job;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.shiro.PermissionManager;
import org.springframework.batch.admin.service.SearchableJobExecutionDao;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiFunction;
import static org.ohdsi.webapi.Constants.Params.SOURCE_KEY;
@Service
public class NotificationServiceImpl implements NotificationService {
private static final int MAX_SIZE = 10;
private static final int PAGE_SIZE = MAX_SIZE * 10;
private static final List<String> WHITE_LIST = new ArrayList<>();
private static final List<String> FOLDING_KEYS = new ArrayList<>();
private final SearchableJobExecutionDao jobExecutionDao;
private final PermissionManager permissionManager;
private final UserRepository userRepository;
@Value("#{!'${security.provider}'.equals('DisabledSecurity')}")
private boolean securityEnabled;
public NotificationServiceImpl(SearchableJobExecutionDao jobExecutionDao, List<GeneratesNotification> whiteList, PermissionManager permissionManager, UserRepository userRepository) {
this.jobExecutionDao = jobExecutionDao;
this.permissionManager = permissionManager;
this.userRepository = userRepository;
whiteList.forEach(g -> {
WHITE_LIST.add(g.getJobName());
FOLDING_KEYS.add(g.getExecutionFoldingKey());
});
// Folding key for warming source key job
FOLDING_KEYS.add(SOURCE_KEY);
}
@Override
public List<JobExecutionInfo> findLastJobs(List<BatchStatus> hideStatuses) {
return findJobs(hideStatuses, MAX_SIZE, false);
}
@Override
public List<JobExecutionInfo> findRefreshCacheLastJobs() {
return findJobs(Collections.emptyList(), MAX_SIZE, true);
}
public List<JobExecutionInfo> findJobs(List<BatchStatus> hideStatuses, int maxSize, boolean refreshJobsOnly) {
BiFunction<JobExecutionInfo, JobExecutionInfo, JobExecutionInfo> mergeFunction = (x, y) -> {
final Date xStartTime = x != null ? x.getJobExecution().getStartTime() : null;
final Date yStartTime = y != null ? y.getJobExecution().getStartTime() : null;
return xStartTime != null ?
yStartTime != null ?
xStartTime.after(yStartTime) ? x
: y
: x
: y;
};
final Map<String, JobExecutionInfo> allJobMap = new HashMap<>();
final Map<String, JobExecutionInfo> userJobMap = new HashMap<>();
for (int start = 0; (!refreshJobsOnly && userJobMap.size() < MAX_SIZE) || allJobMap.size() < MAX_SIZE; start += PAGE_SIZE) {
final List<JobExecution> page = jobExecutionDao.getJobExecutions(start, PAGE_SIZE);
if(page.size() == 0) {
break;
}
for (JobExecution jobExec: page) {
// ignore completed jobs when user does not want to see them
if (hideStatuses.contains(jobExec.getStatus())) {
continue;
}
if (!refreshJobsOnly && isInWhiteList(jobExec)) {
boolean isMine = isMine(jobExec);
if (userJobMap.size() < MAX_SIZE && isMine) {
JobExecutionInfo executionInfo = new JobExecutionInfo(jobExec, JobOwnerType.USER_JOB);
userJobMap.merge(getFoldingKey(jobExec), executionInfo, mergeFunction);
}
if (allJobMap.size() < MAX_SIZE) {
JobExecutionInfo executionInfo = new JobExecutionInfo(jobExec, JobOwnerType.ALL_JOB);
allJobMap.merge(getFoldingKey(jobExec), executionInfo, mergeFunction);
}
} else if (refreshJobsOnly) {
if (allJobMap.size() < MAX_SIZE && jobExec.getJobInstance().getJobName().startsWith("warming ")) {
JobExecutionInfo executionInfo = new JobExecutionInfo(jobExec, JobOwnerType.ALL_JOB);
allJobMap.merge(getFoldingKey(jobExec), executionInfo, mergeFunction);
}
}
if ((refreshJobsOnly || userJobMap.size() >= maxSize) && allJobMap.size() >= maxSize) {
break;
}
}
}
final List<JobExecutionInfo> jobs = new ArrayList<>(allJobMap.values());
jobs.addAll(userJobMap.values());
return jobs;
}
@Override
public Date getLastViewedTime() throws Exception {
final UserEntity user = securityEnabled ? permissionManager.getCurrentUser() : null;
return user != null ? user.getLastViewedNotificationsTime() : null;
}
@Override
public void setLastViewedTime(Date stamp) throws Exception {
final UserEntity user = securityEnabled ? permissionManager.getCurrentUser() : null;
if(user != null) {
user.setLastViewedNotificationsTime(stamp);
userRepository.save(user);
}
}
private static String getFoldingKey(JobExecution entity) {
final Optional<String> key = entity.getJobParameters().getParameters().keySet().stream().filter(FOLDING_KEYS::contains).findAny();
return key.map(s -> s + "_" + entity.getJobParameters().getString(s) + "_" + entity.getJobParameters().getString("source_id"))
.orElseGet(() -> String.valueOf(entity.getId()));
}
private static boolean isInWhiteList(JobExecution entity) {
return WHITE_LIST.contains(entity.getJobInstance().getJobName());
}
// True when the job was started by the currently logged-in user. With
// security disabled the login is null, so this matches jobs with no author.
private boolean isMine(JobExecution jobExec) {
    final String jobAuthor = jobExec.getJobParameters().getString(Constants.Params.JOB_AUTHOR);
    String currentLogin = null;
    if (securityEnabled) {
        currentLogin = permissionManager.getSubjectName();
    }
    return Objects.equals(currentLogin, jobAuthor);
}
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/JobExecutionInfo.java | src/main/java/org/ohdsi/webapi/job/JobExecutionInfo.java | package org.ohdsi.webapi.job;
import org.springframework.batch.core.JobExecution;
/**
 * Pairs a Spring Batch {@link JobExecution} with the audience it is shown to
 * in the notifications panel (the current user's own job vs. everyone's jobs).
 */
public class JobExecutionInfo {

    private JobExecution execution;
    private JobOwnerType owner;

    public JobExecutionInfo(JobExecution jobExecution, JobOwnerType ownerType) {
        this.execution = jobExecution;
        this.owner = ownerType;
    }

    public JobExecution getJobExecution() {
        return execution;
    }

    public void setJobExecution(JobExecution jobExecution) {
        this.execution = jobExecution;
    }

    public JobOwnerType getOwnerType() {
        return owner;
    }

    public void setOwnerType(JobOwnerType ownerType) {
        this.owner = ownerType;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/GeneratesNotification.java | src/main/java/org/ohdsi/webapi/job/GeneratesNotification.java | package org.ohdsi.webapi.job;
/**
 * Implemented by job configurations whose executions should appear in the
 * notifications panel.
 */
public interface GeneratesNotification {
    /** Job name used to match executions against the notification white list. */
    String getJobName();
    /** Job-parameter key used to fold repeated executions of the same entity into one notification. */
    String getExecutionFoldingKey();
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/job/JobInstanceResource.java | src/main/java/org/ohdsi/webapi/job/JobInstanceResource.java | package org.ohdsi.webapi.job;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * JSON resource describing a Spring Batch job instance (its id and name).
 */
public class JobInstanceResource {

    @JsonProperty("instanceId")
    private Long instanceId;

    @JsonProperty("name")
    private String name;

    /** Required by Jackson for deserialization. */
    public JobInstanceResource() {
    }

    public JobInstanceResource(final Long instanceId) {
        this(instanceId, null);
    }

    public JobInstanceResource(final Long instanceId, final String name) {
        this.instanceId = instanceId;
        this.name = name;
    }

    /** @return the instance id */
    public Long getInstanceId() {
        return this.instanceId;
    }

    /** @return the job name */
    public String getName() {
        return this.name;
    }

    /** @param name the job name to set */
    public void setName(final String name) {
        this.name = name;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisDeserializer.java | src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisDeserializer.java | package org.ohdsi.webapi.feanalysis;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.ObjectCodec;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import com.fasterxml.jackson.databind.type.CollectionType;
import org.ohdsi.analysis.cohortcharacterization.design.CcResultType;
import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain;
import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType;
import org.ohdsi.circe.cohortdefinition.ConceptSet;
import org.ohdsi.webapi.feanalysis.dto.BaseFeAnalysisCriteriaDTO;
import org.ohdsi.webapi.feanalysis.dto.FeAnalysisAggregateDTO;
import org.ohdsi.webapi.feanalysis.dto.FeAnalysisDTO;
import org.ohdsi.webapi.feanalysis.dto.FeAnalysisWithConceptSetDTO;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Jackson deserializer for {@link FeAnalysisDTO}.
 * <p>
 * The "design" field is polymorphic: for {@code CRITERIA_SET} analyses it is a
 * JSON array of criteria (optionally accompanied by "conceptSets"), while for
 * every other analysis type it is a plain string. Because of that, the base
 * fields are mapped by hand here instead of relying on default bean mapping.
 */
public class FeAnalysisDeserializer extends JsonDeserializer<FeAnalysisDTO> {

    @Autowired
    private ObjectMapper objectMapper;

    // need to look around and find a way to override procedure of base mapping
    // and handle only a design field
    @Override
    public FeAnalysisDTO deserialize(final JsonParser parser, final DeserializationContext ctxt) throws IOException, JsonProcessingException {
        ObjectCodec codec = parser.getCodec();
        JsonNode node = codec.readTree(parser);
        // Concrete DTO class depends on the analysis type (see createDto).
        FeAnalysisDTO dto = createDto(node);
        final JsonNode name = node.get("name");
        if (name != null && !name.isNull()) {
            dto.setName(name.textValue());
        }
        final JsonNode description = node.get("description");
        // Fixed copy-paste bug: this used to test "!name.isNull()", which threw
        // an NPE when "name" was absent and silently skipped the description
        // whenever "name" was a JSON null.
        if (description != null && !description.isNull()) {
            dto.setDescription(description.textValue());
        }
        // Legacy alias for "description"; wins when both are present.
        final JsonNode descr = node.get("descr");
        if (descr != null && !descr.isNull()) {
            dto.setDescription(descr.textValue());
        }
        final JsonNode id = node.get("id");
        if (id != null && !id.isNull()) {
            dto.setId(id.intValue());
        }
        final JsonNode domain = node.get("domain");
        if (domain != null && !domain.isNull()) {
            final String domainString = domain.textValue();
            dto.setDomain(StandardFeatureAnalysisDomain.valueOf(domainString));
        }
        final StandardFeatureAnalysisType analysisType = getType(node);
        if (analysisType != null) {
            dto.setType(analysisType);
            final JsonNode design = node.get("design");
            if (analysisType == StandardFeatureAnalysisType.CRITERIA_SET) {
                JsonNode statType = node.get("statType");
                if (statType != null) {
                    dto.setStatType(CcResultType.valueOf(statType.textValue()));
                }
                // Criteria-set design is an array of criteria DTOs.
                final List<BaseFeAnalysisCriteriaDTO> list = new ArrayList<>();
                for (final JsonNode jsonNode : design) {
                    list.add(convert(jsonNode));
                }
                dto.setDesign(list);
                final JsonNode conceptSets = node.get("conceptSets");
                if (Objects.nonNull(conceptSets)) {
                    CollectionType typeRef = objectMapper.getTypeFactory().constructCollectionType(List.class, ConceptSet.class);
                    List<ConceptSet> conceptSetList = objectMapper.readValue(conceptSets.traverse(), typeRef);
                    // Safe cast: createDto returns FeAnalysisWithConceptSetDTO for CRITERIA_SET.
                    ((FeAnalysisWithConceptSetDTO) dto).setConceptSets(conceptSetList);
                }
            } else {
                // All other types keep the design as an opaque string.
                dto.setDesign(design.textValue());
            }
        }
        return dto;
    }

    /** Reads the optional "type" field; returns null when absent or JSON null. */
    private StandardFeatureAnalysisType getType(JsonNode jsonNode) {
        final JsonNode type = jsonNode.get("type");
        StandardFeatureAnalysisType result = null;
        if (Objects.nonNull(type) && !type.isNull()) {
            result = StandardFeatureAnalysisType.valueOf(type.textValue());
        }
        return result;
    }

    /** Chooses the DTO subclass: criteria-set analyses carry concept sets. */
    private FeAnalysisDTO createDto(JsonNode jsonNode) {
        final StandardFeatureAnalysisType type = getType(jsonNode);
        FeAnalysisDTO analysisDTO;
        if (Objects.equals(StandardFeatureAnalysisType.CRITERIA_SET, type)) {
            analysisDTO = new FeAnalysisWithConceptSetDTO();
        } else {
            analysisDTO = new FeAnalysisDTO();
        }
        return analysisDTO;
    }

    /** Maps a single design array element onto a criteria DTO. */
    private BaseFeAnalysisCriteriaDTO convert(final JsonNode node) {
        try {
            return objectMapper.treeToValue(node, BaseFeAnalysisCriteriaDTO.class);
        } catch (JsonProcessingException e) {
            throw new IllegalArgumentException(e);
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisController.java | src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisController.java | package org.ohdsi.webapi.feanalysis;
import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysis;
import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain;
import org.ohdsi.webapi.Pagination;
import org.ohdsi.webapi.cohortcharacterization.dto.CcShortDTO;
import org.ohdsi.webapi.common.OptionDTO;
import org.ohdsi.webapi.conceptset.ConceptSetExport;
import org.ohdsi.webapi.feanalysis.domain.*;
import org.ohdsi.webapi.feanalysis.dto.FeAnalysisAggregateDTO;
import org.ohdsi.webapi.feanalysis.dto.FeAnalysisDTO;
import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO;
import org.ohdsi.webapi.security.PermissionService;
import org.ohdsi.webapi.util.ExportUtil;
import org.ohdsi.webapi.util.HttpUtils;
import org.ohdsi.webapi.util.NameUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.convert.ConversionService;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * REST endpoints for managing feature analyses (list, CRUD, copy, concept-set
 * export, and lookup of domains/aggregates).
 */
@Path("/feature-analysis")
@Controller
public class FeAnalysisController {

    private FeAnalysisService service;
    private ConversionService conversionService;
    private PermissionService permissionService;

    // Package-private constructor; dependencies injected by Spring.
    FeAnalysisController(
            final FeAnalysisService service,
            final ConversionService conversionService,
            PermissionService permissionService) {
        this.service = service;
        this.conversionService = conversionService;
        this.permissionService = permissionService;
    }

    /**
     * Get a pageable list of all feature analyses available in WebAPI,
     * with per-entity write-access flags for the current user.
     * @param pageable page request (number, size, sort)
     * @return one page of short feature-analysis DTOs
     */
    @GET
    @Path("/")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public Page<FeAnalysisShortDTO> list(@Pagination Pageable pageable) {
        return service.getPage(pageable).map(entity -> {
            FeAnalysisShortDTO dto = convertFeAnaysisToShortDto(entity);
            permissionService.fillWriteAccess(entity, dto);
            return dto;
        });
    }

    /**
     * Does a feature analysis name already exist (excluding the given id)?
     * @param id id of the analysis being edited, or 0 for a new one
     * @param name the desired name
     * @return count of other analyses with the same name (0 means no conflict)
     */
    @GET
    @Path("/{id}/exists")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public int getCountFeWithSameName(@PathParam("id") @DefaultValue("0") final int id, @QueryParam("name") String name) {
        return service.getCountFeWithSameName(id, name);
    }

    /**
     * List feature analysis domains.
     * @return options such as DRUG, DRUG_ERA, MEASUREMENT, etc.
     */
    @GET
    @Path("/domains")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public List<OptionDTO> listDomains() {
        List<OptionDTO> options = new ArrayList<>();
        for(StandardFeatureAnalysisDomain enumEntry: StandardFeatureAnalysisDomain.values()) {
            options.add(new OptionDTO(enumEntry.name(), enumEntry.getName()));
        }
        return options;
    }

    /**
     * Create a new feature analysis.
     * @param dto feature analysis specification
     * @return the created analysis as a DTO
     */
    @POST
    @Path("/")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public FeAnalysisDTO createAnalysis(final FeAnalysisDTO dto) {
        final FeAnalysisEntity createdEntity = service.createAnalysis(conversionService.convert(dto, FeAnalysisEntity.class));
        return convertFeAnalysisToDto(createdEntity);
    }

    /**
     * Update an existing feature analysis.
     * @param feAnalysisId id of the feature analysis to update
     * @param dto new feature analysis specification
     * @return the updated analysis as a DTO
     */
    @PUT
    @Path("/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public FeAnalysisDTO updateAnalysis(@PathParam("id") final Integer feAnalysisId, final FeAnalysisDTO dto) {
        final FeAnalysisEntity updatedEntity = service.updateAnalysis(feAnalysisId, conversionService.convert(dto, FeAnalysisEntity.class));
        return convertFeAnalysisToDto(updatedEntity);
    }

    /**
     * Delete a feature analysis.
     * @param feAnalysisId id of the feature analysis to delete
     * @throws NotFoundException when no analysis with the given id exists
     */
    @DELETE
    @Path("/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public void deleteAnalysis(@PathParam("id") final Integer feAnalysisId) {
        final FeAnalysisEntity entity = service.findById(feAnalysisId).orElseThrow(NotFoundException::new);
        service.deleteAnalysis(entity);
    }

    /**
     * Get data about a specific feature analysis.
     * @param feAnalysisId id of the feature analysis to retrieve
     * @return id, type, name, domain, description, design, etc.
     * @throws NotFoundException when no analysis with the given id exists
     */
    @GET
    @Transactional
    @Path("/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public FeAnalysisDTO getFeAnalysis(@PathParam("id") final Integer feAnalysisId) {
        final FeAnalysisEntity feAnalysis = service.findById(feAnalysisId)
                .orElseThrow(NotFoundException::new);
        return convertFeAnalysisToDto(feAnalysis);
    }

    /**
     * Export the concept sets of a criteria-based feature analysis as a zipped CSV.
     * Only criteria-based analyses carry concept sets, hence the type check.
     * @param feAnalysisId id of the feature analysis to export
     * @return zip attachment with the exported concept sets
     * @throws BadRequestException when the analysis is not criteria-based
     */
    @GET
    @Path("/{id}/export/conceptset")
    public Response exportConceptSets(@PathParam("id") final Integer feAnalysisId) {
        final FeAnalysisEntity feAnalysis = service.findById(feAnalysisId).orElseThrow(NotFoundException::new);
        if (feAnalysis instanceof FeAnalysisWithCriteriaEntity) {
            List<ConceptSetExport> exportList = service.exportConceptSets((FeAnalysisWithCriteriaEntity<?>) feAnalysis);
            ByteArrayOutputStream stream = ExportUtil.writeConceptSetExportToCSVAndZip(exportList);
            return HttpUtils.respondBinary(stream, String.format("featureAnalysis_%d_export.zip", feAnalysisId));
        } else {
            throw new BadRequestException();
        }
    }

    /**
     * Create a copy of a feature analysis. Preset analyses cannot be copied.
     * @param feAnalysisId id of the feature analysis to copy
     * @return the design specification of the new copy
     * @throws NotFoundException when no analysis with the given id exists
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/copy")
    @Transactional
    public FeAnalysisDTO copy(@PathParam("id") final Integer feAnalysisId) {
        final FeAnalysisEntity feAnalysis = service.findById(feAnalysisId)
                .orElseThrow(NotFoundException::new);
        final FeAnalysisEntity feAnalysisForCopy = getNewEntityForCopy(feAnalysis);
        FeAnalysisEntity saved;
        switch (feAnalysis.getType()) {
            case CRITERIA_SET:
                saved = service.createCriteriaAnalysis((FeAnalysisWithCriteriaEntity) feAnalysisForCopy);
                break;
            case CUSTOM_FE:
                saved = service.createAnalysis(feAnalysisForCopy);
                break;
            default:
                throw new IllegalArgumentException("Analysis with type: " + feAnalysis.getType() + " cannot be copied");
        }
        return convertFeAnalysisToDto(saved);
    }

    // Builds a detached copy of the given analysis: deep-copies criteria and
    // concept sets, clears the id and audit fields, and derives a unique name.
    private FeAnalysisEntity getNewEntityForCopy(FeAnalysisEntity entity) {
        FeAnalysisEntity entityForCopy;
        switch (entity.getType()) {
            case CRITERIA_SET:
                // Criteria analyses come in two flavors, chosen by statistic type.
                switch (entity.getStatType()) {
                    case PREVALENCE:
                        entityForCopy = new FeAnalysisWithPrevalenceCriteriaEntity((FeAnalysisWithCriteriaEntity) entity);
                        break;
                    case DISTRIBUTION:
                        entityForCopy = new FeAnalysisWithDistributionCriteriaEntity((FeAnalysisWithCriteriaEntity) entity);
                        break;
                    default:
                        throw new IllegalArgumentException();
                }
                // deep copy of criteria list...
                // NOTE(review): only name, expression string and aggregate are
                // copied per criterion — confirm no other criterion fields need copying.
                final List<FeAnalysisCriteriaEntity> criteriaList = new ArrayList<>();
                ((FeAnalysisWithCriteriaEntity) entity).getDesign().forEach(c -> {
                    final FeAnalysisCriteriaEntity criteria = createCriteriaEntity((FeAnalysisCriteriaEntity) c);
                    criteria.setName(((FeAnalysisCriteriaEntity) c).getName());
                    criteria.setExpressionString(((FeAnalysisCriteriaEntity) c).getExpressionString());
                    criteria.setAggregate(((FeAnalysisCriteriaEntity) c).getAggregate());
                    criteriaList.add(criteria);
                });
                entityForCopy.setDesign(criteriaList);
                // ...and concept sets
                final FeAnalysisConcepsetEntity concepsetEntity = new FeAnalysisConcepsetEntity();
                concepsetEntity.setRawExpression(((FeAnalysisWithCriteriaEntity) entity).getConceptSetEntity().getRawExpression());
                ((FeAnalysisWithCriteriaEntity) entityForCopy).setConceptSetEntity(concepsetEntity);
                break;
            case CUSTOM_FE:
                entityForCopy = new FeAnalysisWithStringEntity((FeAnalysisWithStringEntity) entity);
                break;
            default:
                throw new IllegalArgumentException("Analysis with type: " + entity.getType() + " cannot be copied");
        }
        // Clear identity/audit fields so the copy is persisted as a new entity.
        entityForCopy.setId(null);
        entityForCopy.setName(
                NameUtils.getNameForCopy(entityForCopy.getName(), this::getNamesLike, service.findByName(entityForCopy.getName())));
        entityForCopy.setCreatedBy(null);
        entityForCopy.setCreatedDate(null);
        entityForCopy.setModifiedBy(null);
        entityForCopy.setModifiedDate(null);
        return entityForCopy;
    }

    /**
     * Get aggregation functions available for feature analyses.
     * @return list of aggregate DTOs
     */
    @GET
    @Path("/aggregates")
    @Produces(MediaType.APPLICATION_JSON)
    public List<FeAnalysisAggregateDTO> listAggregates() {
        List<FeAnalysisAggregateDTO> result = service.findAggregates().stream()
                .map(this::convertFeAnalysisAggregateToDto)
                .collect(Collectors.toList());
        return result;
    }

    // Entity -> short DTO conversion (list view).
    private FeAnalysisShortDTO convertFeAnaysisToShortDto(final FeatureAnalysis entity) {
        return conversionService.convert(entity, FeAnalysisShortDTO.class);
    }

    // Entity -> full DTO conversion (detail view).
    private FeAnalysisDTO convertFeAnalysisToDto(final FeatureAnalysis entity) {
        return conversionService.convert(entity, FeAnalysisDTO.class);
    }

    // Names starting with the given prefix; used to compute a unique copy name.
    private List<String> getNamesLike(String copyName) {
        return service.getNamesLike(copyName);
    }

    private FeAnalysisAggregateDTO convertFeAnalysisAggregateToDto(final FeAnalysisAggregateEntity entity) {
        return conversionService.convert(entity, FeAnalysisAggregateDTO.class);
    }

    // Instantiates the matching empty criteria subtype; the caller copies fields in.
    // NOTE(review): the fallback branch returns a FeAnalysisCriteriaGroupEntity
    // for any other subtype — confirm that is the intended default.
    private FeAnalysisCriteriaEntity createCriteriaEntity(FeAnalysisCriteriaEntity basis) {
        if (basis instanceof FeAnalysisWindowedCriteriaEntity) {
            return new FeAnalysisWindowedCriteriaEntity();
        } else if (basis instanceof FeAnalysisDemographicCriteriaEntity) {
            return new FeAnalysisDemographicCriteriaEntity();
        } else {
            return new FeAnalysisCriteriaGroupEntity();
        }
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisService.java | src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisService.java | package org.ohdsi.webapi.feanalysis;
import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity;
import org.ohdsi.webapi.conceptset.ConceptSetExport;
import org.ohdsi.webapi.feanalysis.domain.FeAnalysisAggregateEntity;
import org.ohdsi.webapi.feanalysis.domain.FeAnalysisCriteriaEntity;
import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity;
import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithCriteriaEntity;
import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithStringEntity;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.Set;
/**
 * Service contract for creating, querying, updating and deleting feature
 * analyses and their criteria, concept sets and aggregates.
 */
public interface FeAnalysisService {
    /** Returns one page of all feature analyses. */
    Page<FeAnalysisEntity> getPage(final Pageable pageable);
    /** Counts analyses with the given name, excluding the given id (name-conflict check). */
    int getCountFeWithSameName(Integer id, String name);
    /** Finds preset (string-design) analyses by their system names. */
    List<FeAnalysisWithStringEntity> findPresetAnalysesBySystemNames(Collection<String> names);
    /** Persists a new analysis; defaults missing stat type to PREVALENCE. */
    FeAnalysisEntity createAnalysis(FeAnalysisEntity analysis);
    Optional<FeAnalysisEntity> findById(Integer id);
    Optional<FeAnalysisEntity> findByName(String name);
    /** Persists a new criteria-based analysis together with its criteria and concept set. */
    FeAnalysisWithCriteriaEntity createCriteriaAnalysis(FeAnalysisWithCriteriaEntity analysis);
    /** Analyses referenced by the given cohort characterization. */
    Set<FeAnalysisEntity> findByCohortCharacterization(CohortCharacterizationEntity cohortCharacterization);
    List<FeAnalysisWithStringEntity> findAllPresetAnalyses();
    /** Applies the given changes to an existing analysis; fails if it is locked. */
    FeAnalysisEntity updateAnalysis(Integer feAnalysisId, FeAnalysisEntity convert);
    /** Deletes the analysis; fails if it is locked. */
    void deleteAnalysis(FeAnalysisEntity entity);
    void deleteAnalysis(int id);
    /** Names of analyses starting with the given prefix (for unique copy names). */
    List<String> getNamesLike(String name);
    /** Resolves the analysis' concept sets against the priority vocabulary for export. */
    List<ConceptSetExport> exportConceptSets(FeAnalysisWithCriteriaEntity<?> analysisEntity);
    /** Finds an existing preset analysis with the same design and name, if any. */
    Optional<? extends FeAnalysisEntity> findByDesignAndName(FeAnalysisWithStringEntity withStringEntity, final String name);
    /** Finds an existing criteria analysis matching criteria list, concept sets, domain and stat type. */
    Optional<FeAnalysisEntity> findByCriteriaListAndCsAndDomainAndStat(List<? extends FeAnalysisCriteriaEntity> newCriteriaList, FeAnalysisWithCriteriaEntity<? extends FeAnalysisCriteriaEntity> feAnalysis);
    /** All available aggregate functions. */
    List<FeAnalysisAggregateEntity> findAggregates();
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisServiceImpl.java | src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisServiceImpl.java | package org.ohdsi.webapi.feanalysis;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraph;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.analysis.cohortcharacterization.design.CcResultType;
import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType;
import org.ohdsi.circe.cohortdefinition.ConceptSet;
import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity;
import org.ohdsi.webapi.conceptset.ConceptSetExport;
import org.ohdsi.webapi.feanalysis.domain.*;
import org.ohdsi.webapi.feanalysis.event.FeAnalysisChangedEvent;
import org.ohdsi.webapi.feanalysis.repository.FeAnalysisAggregateRepository;
import org.ohdsi.webapi.feanalysis.repository.FeAnalysisCriteriaRepository;
import org.ohdsi.webapi.feanalysis.repository.FeAnalysisEntityRepository;
import org.ohdsi.webapi.feanalysis.repository.FeAnalysisWithStringEntityRepository;
import org.ohdsi.webapi.source.SourceInfo;
import org.ohdsi.webapi.util.EntityUtils;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.ws.rs.NotFoundException;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.ohdsi.webapi.service.AbstractDaoService;
import org.ohdsi.webapi.service.VocabularyService;
/**
 * Default {@link FeAnalysisService} implementation. Read-only transactions by
 * default; mutating methods override with {@code @Transactional}.
 */
@Service
@Transactional(readOnly = true)
public class FeAnalysisServiceImpl extends AbstractDaoService implements FeAnalysisService {

    private final FeAnalysisEntityRepository analysisRepository;
    private final FeAnalysisCriteriaRepository criteriaRepository;
    private final FeAnalysisWithStringEntityRepository stringAnalysisRepository;
    private final VocabularyService vocabularyService;
    private final ApplicationEventPublisher eventPublisher;
    private FeAnalysisAggregateRepository aggregateRepository;

    // Eagerly fetch audit users so DTO conversion does not trigger lazy loads.
    private final EntityGraph defaultEntityGraph = EntityUtils.fromAttributePaths(
            "createdBy",
            "modifiedBy"
    );

    public FeAnalysisServiceImpl(
            final FeAnalysisEntityRepository analysisRepository,
            final FeAnalysisCriteriaRepository criteriaRepository,
            final FeAnalysisWithStringEntityRepository stringAnalysisRepository,
            final VocabularyService vocabularyService,
            final FeAnalysisAggregateRepository aggregateRepository,
            final ApplicationEventPublisher eventPublisher) {
        this.analysisRepository = analysisRepository;
        this.criteriaRepository = criteriaRepository;
        this.stringAnalysisRepository = stringAnalysisRepository;
        this.vocabularyService = vocabularyService;
        this.aggregateRepository = aggregateRepository;
        this.eventPublisher = eventPublisher;
    }

    @Override
    public Page<FeAnalysisEntity> getPage(final Pageable pageable) {
        return analysisRepository.findAll(pageable, defaultEntityGraph);
    }

    @Override
    public int getCountFeWithSameName(Integer id, String name){
        return analysisRepository.getCountFeWithSameName(id, name);
    }

    @Override
    public List<FeAnalysisWithStringEntity> findPresetAnalysesBySystemNames(Collection<String> names) {
        return stringAnalysisRepository.findByDesignIn(names);
    }

    @Override
    @Transactional
    public FeAnalysisEntity createAnalysis(final FeAnalysisEntity analysis) {
        // Default statistic type when the client did not specify one.
        if (analysis.getStatType() == null) {
            analysis.setStatType(CcResultType.PREVALENCE);
        }
        return saveNew(analysis);
    }

    @Override
    public Optional<FeAnalysisEntity> findById(Integer id) {
        return analysisRepository.findById(id, defaultEntityGraph);
    }

    @Override
    public Optional<FeAnalysisEntity> findByName(String name) {
        return analysisRepository.findByName(name);
    }

    @Override
    @Transactional
    public FeAnalysisWithCriteriaEntity createCriteriaAnalysis(final FeAnalysisWithCriteriaEntity analysis) {
        // Save the analysis first with an empty design so criteria can
        // reference a persisted parent, then attach concept set and criteria.
        FeAnalysisWithCriteriaEntity newAnalysis = newAnalysis(analysis);
        newAnalysis.setDesign(Collections.emptyList());
        final FeAnalysisWithCriteriaEntity entityWithMainFields = saveNew(newAnalysis);
        if (createOrUpdateConceptSetEntity(entityWithMainFields, analysis.getConceptSetEntity())) {
            analysisRepository.save(entityWithMainFields);
        }
        final List<FeAnalysisCriteriaEntity> criteriaList = createCriteriaListForAnalysis(entityWithMainFields, analysis.getDesign());
        entityWithMainFields.setDesign(criteriaList);
        return entityWithMainFields;
    }

    // Copies the modified concept-set expression onto the analysis' concept-set
    // entity (creating it when absent). Returns true when anything was changed.
    private boolean createOrUpdateConceptSetEntity(FeAnalysisWithCriteriaEntity analysis, FeAnalysisConcepsetEntity modifiedConceptSet) {
        if (Objects.nonNull(modifiedConceptSet)) {
            FeAnalysisConcepsetEntity concepsetEntity = Optional.ofNullable(analysis.getConceptSetEntity())
                    .orElseGet(FeAnalysisConcepsetEntity::new);
            concepsetEntity.setFeatureAnalysis(analysis);
            concepsetEntity.setRawExpression(modifiedConceptSet.getRawExpression());
            analysis.setConceptSetEntity(concepsetEntity);
            return true;
        } else {
            return false;
        }
    }

    // Stamps audit fields and persists immediately (flush to obtain the id).
    private <T extends FeAnalysisEntity> T saveNew(T entity) {
        entity.setCreatedBy(getCurrentUser());
        entity.setCreatedDate(new Date());
        return analysisRepository.saveAndFlush(entity);
    }

    // Instantiates the criteria-analysis subtype matching the statistic type.
    private FeAnalysisWithCriteriaEntity newAnalysis(final FeAnalysisWithCriteriaEntity analysis) {
        if (Objects.equals(analysis.getStatType(), CcResultType.PREVALENCE)) {
            return new FeAnalysisWithPrevalenceCriteriaEntity(analysis);
        } else if (Objects.equals(analysis.getStatType(), CcResultType.DISTRIBUTION)) {
            return new FeAnalysisWithDistributionCriteriaEntity(analysis);
        }
        throw new IllegalArgumentException();
    }

    // Links each criterion to its parent analysis and persists it.
    private List<FeAnalysisCriteriaEntity> createCriteriaListForAnalysis(final FeAnalysisWithCriteriaEntity analysis, final List<FeAnalysisCriteriaEntity> design) {
        return design.stream()
                .peek(criteria -> criteria.setFeatureAnalysis(analysis))
                .map(criteria -> criteriaRepository.save(criteria))
                .collect(Collectors.toList());
    }

    @Override
    public Set<FeAnalysisEntity> findByCohortCharacterization(final CohortCharacterizationEntity cohortCharacterization) {
        return analysisRepository.findAllByCohortCharacterizations(cohortCharacterization);
    }

    @Override
    public List<FeAnalysisWithStringEntity> findAllPresetAnalyses() {
        return analysisRepository.findAllByType(StandardFeatureAnalysisType.PRESET).stream().map(a -> (FeAnalysisWithStringEntity) a).collect(Collectors.toList());
    }

    @Override
    @Transactional
    public FeAnalysisEntity updateAnalysis(Integer feAnalysisId, FeAnalysisEntity updatedEntity) {
        FeAnalysisEntity savedEntity = findById(feAnalysisId).orElseThrow(NotFoundException::new);
        checkEntityLocked(savedEntity);
        savedEntity.setDescr(updatedEntity.getDescr());
        if (savedEntity instanceof FeAnalysisWithCriteriaEntity && updatedEntity instanceof FeAnalysisWithCriteriaEntity) {
            FeAnalysisWithCriteriaEntity<?> updatedWithCriteriaEntity = (FeAnalysisWithCriteriaEntity) updatedEntity,
                    savedWithCriteria = (FeAnalysisWithCriteriaEntity) savedEntity;
            // Delete criteria dropped by the update before re-linking the rest.
            removeFeAnalysisCriteriaEntities(savedWithCriteria, updatedWithCriteriaEntity);
            updatedWithCriteriaEntity.getDesign().forEach(criteria -> criteria.setFeatureAnalysis(savedWithCriteria));
            createOrUpdateConceptSetEntity((FeAnalysisWithCriteriaEntity) savedEntity, updatedWithCriteriaEntity.getConceptSetEntity());
        }
        savedEntity.setDesign(updatedEntity.getDesign());
        // Optional fields: only overwrite when the update actually carries a value.
        if (Objects.nonNull(updatedEntity.getDomain())) {
            savedEntity.setDomain(updatedEntity.getDomain());
        }
        savedEntity.setLocked(updatedEntity.getLocked());
        if (StringUtils.isNotEmpty(updatedEntity.getName())) {
            savedEntity.setName(updatedEntity.getName());
        }
        if (updatedEntity.getStatType() != null) {
            savedEntity.setStatType(updatedEntity.getStatType());
        }
        if (Objects.nonNull(updatedEntity.getType())) {
            savedEntity.setType(updatedEntity.getType());
        }
        savedEntity.setModifiedBy(getCurrentUser());
        savedEntity.setModifiedDate(new Date());
        savedEntity = analysisRepository.save(savedEntity);
        // Notify listeners (e.g. dependent characterizations) about the change.
        eventPublisher.publishEvent(new FeAnalysisChangedEvent(savedEntity));
        return savedEntity;
    }

    // Deletes criteria present in the original design but absent (by id) from the update.
    private void removeFeAnalysisCriteriaEntities(FeAnalysisWithCriteriaEntity<?> original, FeAnalysisWithCriteriaEntity<?> updated) {
        List<FeAnalysisCriteriaEntity> removed = original.getDesign().stream()
                .filter(c -> updated.getDesign().stream().noneMatch(u -> Objects.equals(c.getId(), u.getId())))
                .collect(Collectors.toList());
        criteriaRepository.delete(removed);
    }

    @Override
    @Transactional
    public void deleteAnalysis(FeAnalysisEntity entity) {
        checkEntityLocked(entity);
        analysisRepository.delete(entity);
    }

    @Override
    @Transactional
    public void deleteAnalysis(int id) {
        deleteAnalysis(analysisRepository.findById(id).orElseThrow(() -> new RuntimeException("There is no Feature Analysis with id = " + id)));
    }

    @Override
    public List<String> getNamesLike(String name) {
        return analysisRepository.findAllByNameStartsWith(name).stream().map(FeAnalysisEntity::getName).collect(Collectors.toList());
    }

    @Override
    public List<ConceptSetExport> exportConceptSets(FeAnalysisWithCriteriaEntity<?> analysisEntity) {
        // Resolve concepts against the priority vocabulary source.
        SourceInfo sourceInfo = new SourceInfo(vocabularyService.getPriorityVocabularySource());
        List<ConceptSet> conceptSets = analysisEntity.getConceptSets();
        return conceptSets.stream()
                .map(cs -> vocabularyService.exportConceptSet(cs, sourceInfo))
                .collect(Collectors.toList());
    }

    @Override
    public Optional<? extends FeAnalysisEntity> findByDesignAndName(final FeAnalysisWithStringEntity withStringEntity, final String name) {
        return this.findByDesignAndPredicate(withStringEntity.getDesign(), f -> Objects.equals(f.getName(), name));
    }

    @Override
    public Optional<FeAnalysisEntity> findByCriteriaListAndCsAndDomainAndStat(List<? extends FeAnalysisCriteriaEntity> newCriteriaList, FeAnalysisWithCriteriaEntity<? extends FeAnalysisCriteriaEntity> newFeAnalysis) {
        // Group candidate criteria (matched by expression string) by their parent
        // analysis, then keep only analyses matching on all four dimensions.
        Map<FeAnalysisWithCriteriaEntity, List<FeAnalysisCriteriaEntity>> feAnalysisEntityListMap = newCriteriaList.stream()
                .map(c -> criteriaRepository.findAllByExpressionString(c.getExpressionString()))
                .flatMap(List::stream).collect(Collectors.groupingBy(FeAnalysisCriteriaEntity::getFeatureAnalysis));
        return feAnalysisEntityListMap.entrySet().stream().filter(e -> {
            FeAnalysisWithCriteriaEntity feAnalysis = e.getKey();
            return checkCriteriaList(e.getValue(), newCriteriaList) &&
                    CollectionUtils.isEqualCollection(feAnalysis.getConceptSets(), newFeAnalysis.getConceptSets()) &&
                    feAnalysis.getDomain().equals(newFeAnalysis.getDomain()) &&
                    feAnalysis.getStatType().equals(newFeAnalysis.getStatType());
        }).findAny().map(Map.Entry::getKey);
    }

    // True when both criteria lists contain the same expression strings (order-insensitive).
    private boolean checkCriteriaList(List<FeAnalysisCriteriaEntity> curCriteriaList, List<? extends FeAnalysisCriteriaEntity> newCriteriaList) {
        List<String> currentList = curCriteriaList.stream().map(FeAnalysisCriteriaEntity::getExpressionString).collect(Collectors.toList());
        List<String> newList = newCriteriaList.stream().map(FeAnalysisCriteriaEntity::getExpressionString).collect(Collectors.toList());
        return CollectionUtils.isEqualCollection(currentList, newList);
    }

    private Optional<? extends FeAnalysisEntity> findByDesignAndPredicate(final String design, final Predicate<FeAnalysisEntity> f) {
        List<? extends FeAnalysisEntity> detailsFromDb = stringAnalysisRepository.findByDesign(design);
        return detailsFromDb
                .stream()
                .filter(f)
                .findFirst();
    }

    // Locked analyses may not be modified or deleted.
    // NOTE(review): reference comparison with Boolean.TRUE relies on autobox
    // caching; works for Boolean.valueOf results — confirm getLocked never
    // returns a distinct Boolean instance.
    private void checkEntityLocked(FeAnalysisEntity entity) {
        if (entity.getLocked() == Boolean.TRUE) {
            throw new IllegalArgumentException(String.format("Feature analysis %s is locked.", entity.getName()));
        }
    }

    @Override
    public List<FeAnalysisAggregateEntity> findAggregates() {
        return aggregateRepository.findAll();
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWithConceptSetDTO.java | src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWithConceptSetDTO.java | package org.ohdsi.webapi.feanalysis.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.ohdsi.circe.cohortdefinition.ConceptSet;
import java.util.List;
/**
 * Feature analysis DTO variant that additionally carries the Circe concept sets
 * referenced by the analysis design.
 */
public class FeAnalysisWithConceptSetDTO extends FeAnalysisDTO {

    @JsonProperty("conceptSets")
    private List<ConceptSet> conceptSets;

    /**
     * @return concept sets used by this analysis; may be {@code null} when not populated
     */
    public List<ConceptSet> getConceptSets() {
        return this.conceptSets;
    }

    public void setConceptSets(final List<ConceptSet> conceptSets) {
        this.conceptSets = conceptSets;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisCriteriaDTO.java | src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisCriteriaDTO.java | package org.ohdsi.webapi.feanalysis.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.ohdsi.circe.cohortdefinition.CriteriaGroup;
/**
 * Criteria-based feature analysis criteria whose expression is a Circe
 * {@link CriteriaGroup}. Serves as the default concrete type for the
 * {@code criteriaType} polymorphic hierarchy.
 * <p>
 * NOTE: the {@code aggregate} property is inherited from
 * {@link BaseFeAnalysisCriteriaDTO}. The previous version redeclared the field
 * (with a second {@code @JsonProperty("aggregate")}) and overrode its
 * accessors, shadowing the superclass state so the two copies could diverge;
 * the duplicate has been removed and the inherited accessors are used instead.
 */
public class FeAnalysisCriteriaDTO extends BaseFeAnalysisCriteriaDTO {

    @JsonProperty("expression")
    private CriteriaGroup expression;

    public FeAnalysisCriteriaDTO() {
    }

    public FeAnalysisCriteriaDTO(Long id, String name, CriteriaGroup expression) {
        super(id, name);
        this.expression = expression;
    }

    public CriteriaGroup getExpression() {
        return expression;
    }

    public void setExpression(final CriteriaGroup expression) {
        this.expression = expression;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/dto/BaseFeAnalysisCriteriaDTO.java | src/main/java/org/ohdsi/webapi/feanalysis/dto/BaseFeAnalysisCriteriaDTO.java | package org.ohdsi.webapi.feanalysis.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
/**
 * Common base for feature analysis criteria DTOs.
 * <p>
 * Polymorphic (de)serialization is driven by the {@code criteriaType} JSON
 * property; payloads that omit it fall back to {@link FeAnalysisCriteriaDTO}.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "criteriaType", defaultImpl = FeAnalysisCriteriaDTO.class)
@JsonSubTypes({
        @JsonSubTypes.Type(value = FeAnalysisCriteriaDTO.class, name = "CriteriaGroup"),
        @JsonSubTypes.Type(value = FeAnalysisWindowedCriteriaDTO.class, name = "WindowedCriteria"),
        @JsonSubTypes.Type(value = FeAnalysisDemographicCriteriaDTO.class, name = "DemographicCriteria")
})
public abstract class BaseFeAnalysisCriteriaDTO {

    @JsonProperty("id")
    private Long id;

    @JsonProperty("name")
    private String name;

    @JsonProperty("aggregate")
    private FeAnalysisAggregateDTO aggregate;

    public BaseFeAnalysisCriteriaDTO() {
    }

    public BaseFeAnalysisCriteriaDTO(Long id, String name) {
        this.id = id;
        this.name = name;
    }

    public Long getId() {
        return this.id;
    }

    public void setId(final Long id) {
        this.id = id;
    }

    public String getName() {
        return this.name;
    }

    public void setName(final String name) {
        this.name = name;
    }

    public FeAnalysisAggregateDTO getAggregate() {
        return this.aggregate;
    }

    public void setAggregate(final FeAnalysisAggregateDTO aggregate) {
        this.aggregate = aggregate;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWindowedCriteriaDTO.java | src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWindowedCriteriaDTO.java | package org.ohdsi.webapi.feanalysis.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.ohdsi.circe.cohortdefinition.WindowedCriteria;
/**
 * Feature analysis criteria whose expression is a Circe
 * {@link WindowedCriteria} (criteria evaluated within an observation window).
 */
public class FeAnalysisWindowedCriteriaDTO extends BaseFeAnalysisCriteriaDTO {

    @JsonProperty("expression")
    private WindowedCriteria expression;

    public FeAnalysisWindowedCriteriaDTO() {
    }

    public FeAnalysisWindowedCriteriaDTO(Long id, String name, WindowedCriteria expression) {
        super(id, name);
        this.expression = expression;
    }

    public WindowedCriteria getExpression() {
        return this.expression;
    }

    public void setExpression(final WindowedCriteria expression) {
        this.expression = expression;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisAggregateDTO.java | src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisAggregateDTO.java | package org.ohdsi.webapi.feanalysis.dto;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import org.ohdsi.analysis.TableJoin;
import org.ohdsi.analysis.cohortcharacterization.design.AggregateFunction;
import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysisAggregate;
import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain;
import org.ohdsi.circe.cohortdefinition.builders.CriteriaColumn;
/**
 * DTO describing an aggregate function applied to a feature analysis
 * (e.g. how a distribution statistic is computed and which table it joins to).
 * Implements {@link FeatureAnalysisAggregate} from the standardized analysis API.
 */
public class FeAnalysisAggregateDTO implements FeatureAnalysisAggregate {

    @JsonProperty("id")
    private Integer id;

    @JsonProperty("name")
    private String name;

    @JsonProperty("domain")
    private StandardFeatureAnalysisDomain domain;

    @JsonProperty("function")
    private AggregateFunction function;

    @JsonProperty("expression")
    private String expression;

    @JsonProperty("joinTable")
    private String joinTable;

    @JsonProperty("joinType")
    private TableJoin joinType;

    @JsonProperty("joinCondition")
    private String joinCondition;

    @JsonProperty("isDefault")
    private boolean isDefault;

    @JsonProperty("missingMeansZero")
    private boolean missingMeansZero;

    @JsonProperty("additionalColumns")
    private List<CriteriaColumn> columns;

    /**
     * @return extra criteria columns to include alongside the aggregate
     */
    @Override
    public List<CriteriaColumn> getAdditionalColumns() {
        return this.columns;
    }

    public void setAdditionalColumns(final List<CriteriaColumn> columns) {
        this.columns = columns;
    }

    public Integer getId() {
        return this.id;
    }

    public void setId(final Integer id) {
        this.id = id;
    }

    public String getName() {
        return this.name;
    }

    public void setName(final String name) {
        this.name = name;
    }

    public StandardFeatureAnalysisDomain getDomain() {
        return this.domain;
    }

    public void setDomain(final StandardFeatureAnalysisDomain domain) {
        this.domain = domain;
    }

    public AggregateFunction getFunction() {
        return this.function;
    }

    public void setFunction(final AggregateFunction function) {
        this.function = function;
    }

    public String getExpression() {
        return this.expression;
    }

    public void setExpression(final String expression) {
        this.expression = expression;
    }

    public String getJoinTable() {
        return this.joinTable;
    }

    public void setJoinTable(final String joinTable) {
        this.joinTable = joinTable;
    }

    public boolean isDefault() {
        return this.isDefault;
    }

    public void setDefault(final boolean aDefault) {
        this.isDefault = aDefault;
    }

    public TableJoin getJoinType() {
        return this.joinType;
    }

    public void setJoinType(final TableJoin joinType) {
        this.joinType = joinType;
    }

    public String getJoinCondition() {
        return this.joinCondition;
    }

    public void setJoinCondition(final String joinCondition) {
        this.joinCondition = joinCondition;
    }

    public boolean isMissingMeansZero() {
        return this.missingMeansZero;
    }

    public void setMissingMeansZero(final boolean missingMeansZero) {
        this.missingMeansZero = missingMeansZero;
    }

    /**
     * Required by the {@link FeatureAnalysisAggregate} interface although not
     * used anywhere; always reports {@code false} and is hidden from JSON.
     */
    @JsonIgnore
    @Override
    public boolean hasQuery() {
        return false;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
OHDSI/WebAPI | https://github.com/OHDSI/WebAPI/blob/b22d7bf02d98c0cf78c46801877368d6ecad2245/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisShortDTO.java | src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisShortDTO.java | package org.ohdsi.webapi.feanalysis.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.ohdsi.analysis.cohortcharacterization.design.CcResultType;
import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain;
import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType;
import org.ohdsi.webapi.service.dto.CommonEntityDTO;
/**
 * Lightweight feature analysis representation used for list views: identity,
 * classification (type/domain/stat type) and annual/temporal support flags,
 * without the full design payload.
 */
public class FeAnalysisShortDTO extends CommonEntityDTO {

    @JsonProperty("description")
    protected String description;

    // Exposed through getters; serialized under their bean property names.
    protected Boolean supportsAnnual;
    protected Boolean supportsTemporal;

    @JsonProperty("id")
    private Integer id;

    @JsonProperty("name")
    private String name;

    @JsonProperty("type")
    private StandardFeatureAnalysisType type;

    @JsonProperty("domain")
    private StandardFeatureAnalysisDomain domain;

    @JsonProperty("statType")
    private CcResultType statType;

    @JsonProperty("includeAnnual")
    private Boolean includeAnnual;

    @JsonProperty("includeTemporal")
    private Boolean includeTemporal;

    public Integer getId() {
        return this.id;
    }

    public void setId(final Integer id) {
        this.id = id;
    }

    public String getName() {
        return this.name;
    }

    public void setName(final String name) {
        this.name = name;
    }

    public StandardFeatureAnalysisType getType() {
        return this.type;
    }

    public void setType(final StandardFeatureAnalysisType type) {
        this.type = type;
    }

    public StandardFeatureAnalysisDomain getDomain() {
        return this.domain;
    }

    public void setDomain(final StandardFeatureAnalysisDomain domain) {
        this.domain = domain;
    }

    public String getDescription() {
        return this.description;
    }

    public void setDescription(final String description) {
        this.description = description;
    }

    public CcResultType getStatType() {
        return this.statType;
    }

    public void setStatType(final CcResultType statType) {
        this.statType = statType;
    }

    public Boolean getSupportsAnnual() {
        return this.supportsAnnual;
    }

    public void setSupportsAnnual(final Boolean supportsAnnual) {
        this.supportsAnnual = supportsAnnual;
    }

    public Boolean getSupportsTemporal() {
        return this.supportsTemporal;
    }

    public void setSupportsTemporal(final Boolean supportsTemporal) {
        this.supportsTemporal = supportsTemporal;
    }

    public Boolean getIncludeAnnual() {
        return this.includeAnnual;
    }

    public void setIncludeAnnual(final Boolean includeAnnual) {
        this.includeAnnual = includeAnnual;
    }

    public Boolean getIncludeTemporal() {
        return this.includeTemporal;
    }

    public void setIncludeTemporal(final Boolean includeTemporal) {
        this.includeTemporal = includeTemporal;
    }
}
| java | Apache-2.0 | b22d7bf02d98c0cf78c46801877368d6ecad2245 | 2026-01-05T02:37:20.475642Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.