answer
stringlengths 17
10.2M
|
|---|
package net.grandcentrix.tray.provider;
import android.content.res.Resources;
import android.net.Uri;
import android.test.AndroidTestCase;
import android.test.mock.MockContext;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for {@link TrayContract}: content URI generation and the configurable
 * provider authority.
 */
public class TrayContractTest extends AndroidTestCase {

    public void testConstruction() throws Exception {
        // The contract class must be instantiable (covers the default constructor).
        new TrayContract();
    }

    public void testGenerateContentUri() throws Exception {
        // Default authority is derived from the test package name.
        Uri uri = TrayContract.generateContentUri(getContext());
        assertEquals("content://net.grandcentrix.tray.preferences.test/preferences",
                uri.toString());

        // A custom authority must be reflected in subsequently generated URIs.
        TrayContract.setAuthority("asdf");
        uri = TrayContract.generateContentUri(getContext());
        assertEquals("content://asdf/preferences", uri.toString());
    }

    public void testGenerateInternalContentUri() throws Exception {
        // Default authority is derived from the test package name.
        Uri uri = TrayContract.generateInternalContentUri(getContext());
        assertEquals("content://net.grandcentrix.tray.preferences.test/internal_preferences",
                uri.toString());

        // A custom authority must be reflected in subsequently generated URIs.
        TrayContract.setAuthority("blubb");
        uri = TrayContract.generateInternalContentUri(getContext());
        assertEquals("content://blubb/internal_preferences", uri.toString());
    }

    public void testGenerateInternalContentUri_WithoutProviderAuthority_AppShouldCrash()
            throws Exception {
        final String authority = TrayContentProvider.mAuthority;
        TrayContentProvider.mAuthority = null;
        try {
            TrayContract.generateInternalContentUri(getContext());
            fail();
        } catch (RuntimeException e) {
            assertTrue(e.getMessage().contains("Internal tray error"));
        } finally {
            // BUGFIX: restore in a finally block so a failing assertion cannot
            // leak the cleared authority into other tests.
            TrayContentProvider.mAuthority = authority;
        }
    }

    public void testLogcatOutput_ShouldPrintIfTrayAuthorityIsNotDefault() throws Exception {
        final MockContext context = new MockContext() {
            @Override
            public Resources getResources() {
                final Resources mockResources = mock(Resources.class);
                // BUGFIX: thenReturn(eq(...)) stubbed the mock with null because
                // eq() is an argument matcher, not a value — matchers used outside
                // a stubbing/verification argument position return a dummy (null).
                // Return the plain string so the non-default authority is seen.
                when(mockResources.getString(anyInt())).thenReturn("notDefaultTrayAuthority");
                return mockResources;
            }
        };

        // Must not throw; the log statement itself cannot be asserted here.
        TrayContract.generateInternalContentUri(context);
        assertTrue(true);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Reset to the default authority so every test starts from a clean state.
        TrayContract.setAuthority(null);
    }
}
|
package com.matthewtamlin.spyglass.library.core;
import com.matthewtamlin.spyglass.library.meta_annotations.Default;
import com.matthewtamlin.spyglass.library.meta_annotations.Handler;
import com.matthewtamlin.spyglass.library.meta_annotations.Use;
import com.matthewtamlin.spyglass.library.use_adapters.UseAdapter;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import static com.matthewtamlin.java_utilities.checkers.NullChecker.checkNotNull;
/**
 * Reflection helpers for reading the Spyglass meta-annotations ({@code @Handler},
 * {@code @Default} and {@code @Use}) from fields and methods.
 */
public class AnnotationUtil {
    /**
     * Returns the first annotation on {@code field} whose type is itself
     * annotated with {@link Handler}, or null if there is none.
     */
    public static Annotation getHandlerAnnotation(final Field field) {
        return getAnnotationWithMeta(field.getDeclaredAnnotations(), Handler.class);
    }

    /**
     * Returns the first annotation on {@code method} whose type is itself
     * annotated with {@link Handler}, or null if there is none.
     */
    public static Annotation getHandlerAnnotation(final Method method) {
        return getAnnotationWithMeta(method.getDeclaredAnnotations(), Handler.class);
    }

    /**
     * Returns the first annotation on {@code field} whose type is itself
     * annotated with {@link Default}, or null if there is none.
     */
    public static Annotation getDefaultAnnotation(final Field field) {
        return getAnnotationWithMeta(field.getDeclaredAnnotations(), Default.class);
    }

    /**
     * Returns the first annotation on {@code method} whose type is itself
     * annotated with {@link Default}, or null if there is none.
     */
    public static Annotation getDefaultAnnotation(final Method method) {
        return getAnnotationWithMeta(method.getDeclaredAnnotations(), Default.class);
    }

    public static void validateAnnotations(final Field field) {
        //TODO
    }

    public static void validateAnnotations(final Method method) {
        //TODO
    }

    /**
     * Maps each parameter index of {@code method} to that parameter's
     * {@link Use}-annotated annotation. Parameters without a Use annotation are
     * omitted; the returned map is sorted by parameter index.
     */
    public static TreeMap<Integer, Annotation> getUseAnnotations(final Method method) {
        // BUGFIX: the local must be declared as TreeMap (not Map) to match the
        // declared return type; the previous Map declaration did not compile.
        final TreeMap<Integer, Annotation> useAnnotationsByIndex = new TreeMap<>();
        final Annotation[][] annotationsByParam = method.getParameterAnnotations();
        for (int i = 0; i < annotationsByParam.length; i++) {
            final Annotation useAnnotation = getAnnotationWithMeta(annotationsByParam[i], Use.class);
            if (useAnnotation != null) {
                useAnnotationsByIndex.put(i, useAnnotation);
            }
        }
        return useAnnotationsByIndex;
    }

    /**
     * Shared scan loop: returns the first annotation whose type carries the
     * meta-annotation {@code meta}, or null if no annotation matches.
     */
    private static Annotation getAnnotationWithMeta(final Annotation[] annotations,
            final Class<? extends Annotation> meta) {
        for (final Annotation a : annotations) {
            if (a.annotationType().isAnnotationPresent(meta)) {
                return a;
            }
        }
        return null;
    }
}
|
package org.libreplan.web.workreports;
import static org.libreplan.web.I18nHelper._;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.Validate;
import org.hibernate.Hibernate;
import org.libreplan.business.common.IntegrationEntity;
import org.libreplan.business.common.daos.IConfigurationDAO;
import org.libreplan.business.common.entities.EntityNameEnum;
import org.libreplan.business.common.exceptions.InstanceNotFoundException;
import org.libreplan.business.common.exceptions.ValidationException;
import org.libreplan.business.costcategories.daos.ITypeOfWorkHoursDAO;
import org.libreplan.business.costcategories.entities.TypeOfWorkHours;
import org.libreplan.business.labels.daos.ILabelDAO;
import org.libreplan.business.labels.entities.Label;
import org.libreplan.business.labels.entities.LabelType;
import org.libreplan.business.orders.daos.IOrderElementDAO;
import org.libreplan.business.orders.daos.ISumChargedEffortDAO;
import org.libreplan.business.orders.entities.OrderElement;
import org.libreplan.business.orders.entities.OrderLineGroup;
import org.libreplan.business.resources.daos.IWorkerDAO;
import org.libreplan.business.resources.entities.Resource;
import org.libreplan.business.resources.entities.Worker;
import org.libreplan.business.workreports.daos.IWorkReportDAO;
import org.libreplan.business.workreports.daos.IWorkReportTypeDAO;
import org.libreplan.business.workreports.entities.WorkReport;
import org.libreplan.business.workreports.entities.WorkReportLabelTypeAssigment;
import org.libreplan.business.workreports.entities.WorkReportLine;
import org.libreplan.business.workreports.entities.WorkReportType;
import org.libreplan.business.workreports.valueobjects.DescriptionField;
import org.libreplan.business.workreports.valueobjects.DescriptionValue;
import org.libreplan.web.common.IntegrationEntityModel;
import org.libreplan.web.common.concurrentdetection.OnConcurrentModification;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.zkoss.ganttz.IPredicate;
/**
 * Conversation model behind the work report CRUD screens: holds the report
 * being created/edited, eagerly initializes lazy Hibernate associations so the
 * view can use detached entities, and delegates persistence to the DAOs.
 * Prototype-scoped: one instance per conversation.
 */
@Service
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
@OnConcurrentModification(goToPage = "/workreports/workReport.zul")
public class WorkReportModel extends IntegrationEntityModel implements
        IWorkReportModel {

    @Autowired
    private IWorkReportTypeDAO workReportTypeDAO;

    @Autowired
    private IWorkReportDAO workReportDAO;

    @Autowired
    private IOrderElementDAO orderElementDAO;

    @Autowired
    private IWorkerDAO workerDAO;

    @Autowired
    private ILabelDAO labelDAO;

    @Autowired
    private IConfigurationDAO configurationDAO;

    @Autowired
    private ITypeOfWorkHoursDAO typeOfWorkHoursDAO;

    @Autowired
    private ISumChargedEffortDAO sumChargedEffortDAO;

    // Type of the report currently being created/edited (reloaded from DB).
    private WorkReportType workReportType;

    // The report the current conversation operates on.
    private WorkReport workReport;

    // true while editing an existing report, false while creating a new one.
    private boolean editing = false;

    private boolean listingQuery = false;

    // NOTE(review): static mutable map shared by every prototype instance of
    // this model; loadLabelsByAssignedType() clears and repopulates it per
    // conversation. Confirm this cross-instance sharing is intentional.
    private static final Map<LabelType, List<Label>> mapLabelTypes = new HashMap<LabelType, List<Label>>();

    // Cache rebuilt by getWorkReportDTOs(); filtered by getFilterWorkReportDTOs().
    private List<WorkReportDTO> listWorkReportDTOs = new ArrayList<WorkReportDTO>();

    // Cache rebuilt by getAllWorkReportLines(); filtered by getFilterWorkReportLines().
    private List<WorkReportLine> listWorkReportLine = new ArrayList<WorkReportLine>();

    // Lines removed in the UI; their charged effort is subtracted on confirmSave().
    private Set<WorkReportLine> deletedWorkReportLinesSet = new HashSet<WorkReportLine>();

    @Override
    public WorkReport getWorkReport() {
        return workReport;
    }

    @Override
    public WorkReportType getWorkReportType() {
        return this.workReportType;
    }

    /**
     * Starts a "create" conversation: builds a fresh WorkReport of the given
     * type and initializes its code (autogenerated or blank).
     */
    @Override
    @Transactional(readOnly = true)
    public void initCreate(WorkReportType workReportType) {
        editing = false;
        forceLoadWorkReportTypeFromDB(workReportType);
        workReport = WorkReport.create(this.workReportType);
        workReport.setCodeAutogenerated(configurationDAO.getConfiguration()
                .getGenerateCodeForWorkReport());
        if (!workReport.isCodeAutogenerated()) {
            workReport.setCode("");
        }else{
            setDefaultCode();
        }
        loadMaps();
        deletedWorkReportLinesSet = new HashSet<WorkReportLine>();
    }

    /**
     * Starts an "edit" conversation: reloads the report from the database and
     * eagerly initializes everything the (later detached) view will touch.
     */
    @Override
    @Transactional(readOnly = true)
    public void initEdit(WorkReport workReport) {
        editing = true;
        Validate.notNull(workReport);
        this.workReport = getFromDB(workReport);
        forceLoadWorkReportTypeFromDB(workReport.getWorkReportType());
        loadMaps();
        initOldCodes();
        deletedWorkReportLinesSet = new HashSet<WorkReportLine>();
    }

    @Transactional(readOnly = true)
    private WorkReport getFromDB(WorkReport workReport) {
        return getFromDB(workReport.getId());
    }

    // Looks the report up by id; a missing instance is treated as a programming
    // error and wrapped in a RuntimeException.
    @Transactional(readOnly = true)
    private WorkReport getFromDB(Long id) {
        try {
            WorkReport result = workReportDAO.find(id);
            forceLoadEntities(result);
            return result;
        } catch (InstanceNotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Load entities that will be needed in the conversation. The getter calls
     * have no visible effect; they exist only to trigger lazy initialization
     * while the session is open.
     *
     * @param workReport
     */
    private void forceLoadEntities(WorkReport workReport) {
        // Load WorkReportType
        workReport.getWorkReportType().getName();
        if (workReport.getResource() != null) {
            workReport.getResource().getShortDescription();
        }
        if (workReport.getOrderElement() != null) {
            workReport.getOrderElement().getCode();
        }
        // Load Labels
        for (Label label : workReport.getLabels()) {
            label.getName();
            label.getType().getName();
        }
        // Load DescriptionValues
        for (DescriptionValue descriptionValue : workReport
                .getDescriptionValues()) {
            descriptionValue.getFieldName();
        }
        // Load WorkReportLines
        for (WorkReportLine workReportLine : workReport.getWorkReportLines()) {
            // Load principal data
            forceLoadPrincipalDataWorkReportLines(workReportLine);
            // Load Labels
            for (Label label : workReportLine.getLabels()) {
                label.getName();
                label.getType().getName();
            }
            // Load DescriptionValues
            for (DescriptionValue descriptionValue : workReportLine
                    .getDescriptionValues()) {
                descriptionValue.getFieldName();
            }
        }
    }

    // Touches the lazy associations of a line so it can be used detached.
    private void forceLoadPrincipalDataWorkReportLines(WorkReportLine line) {
        line.getEffort().getHours();
        line.getResource().getShortDescription();
        line.getTypeOfWorkHours().getName();
        initalizeOrderElement(line.getOrderElement());
    }

    private void initalizeOrderElement(OrderElement orderElement) {
        Hibernate.initialize(orderElement);
        Hibernate.initialize(orderElement.getChildren());
        initalizeOrder(orderElement);
    }

    // Initializes the whole chain of ancestors up to the root.
    private void initalizeOrder(OrderElement orderElement) {
        OrderLineGroup parent = orderElement.getParent();
        while (parent != null) {
            Hibernate.initialize(parent);
            parent = parent.getParent();
        }
    }

    private void forceLoadWorkReportTypeFromDB(WorkReportType workReportType) {
        this.workReportType = getWorkReportTypeFromDB(workReportType.getId());
        forceLoadCollections(this.workReportType);
    }

    @Transactional(readOnly = true)
    private WorkReportType getWorkReportTypeFromDB(Long id) {
        try {
            WorkReportType result = workReportTypeDAO.find(id);
            return result;
        } catch (InstanceNotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    // Touches the type's lazy collections (description fields, label assignments).
    private void forceLoadCollections(WorkReportType workReportType) {
        for (DescriptionField line : workReportType.getLineFields()) {
            line.getFieldName();
        }
        for (DescriptionField head : workReportType.getHeadingFields()) {
            head.getFieldName();
        }
        for (WorkReportLabelTypeAssigment assignedLabel : workReportType
                .getWorkReportLabelTypeAssigments()) {
            assignedLabel.getDefaultLabel().getName();
            assignedLabel.getLabelType().getName();
        }
    }

    /**
     * Persists the current report, first updating the SumChargedEffort
     * aggregates for both deleted and remaining lines.
     */
    @Override
    @Transactional
    public void confirmSave() throws ValidationException {
        sumChargedEffortDAO.updateRelatedSumChargedEffortWithDeletedWorkReportLineSet(deletedWorkReportLinesSet);
        sumChargedEffortDAO
                .updateRelatedSumChargedEffortWithWorkReportLineSet(workReport
                        .getWorkReportLines());
        workReportDAO.save(workReport);
    }

    @Override
    @Transactional
    public void generateWorkReportLinesIfIsNecessary() {
        // Only autogenerated reports get their line codes generated.
        if (workReport.isCodeAutogenerated()) {
            generateWorkReportLineCodes();
        }
    }

    private void generateWorkReportLineCodes() {
        workReport.generateWorkReportLineCodes(getNumberOfDigitsCode());
    }

    /**
     * Finds an order element by its unique code and initializes its children
     * so it can be used detached.
     */
    @Override
    @Transactional
    public OrderElement findOrderElement(String orderCode)
            throws InstanceNotFoundException {
        OrderElement result = orderElementDAO.findUniqueByCode(orderCode);
        initializeChildren(result);
        return result;
    }

    private void initializeChildren(OrderElement order) {
        for (OrderElement each: order.getAllChildren()) {
            Hibernate.initialize(each);
        }
    }

    @Override
    @Transactional
    public Worker findWorker(String nif) throws InstanceNotFoundException {
        return workerDAO.findUniqueByNif(nif);
    }

    // Resolves a generic Resource to its Worker entity by id.
    @Override
    @Transactional
    public Worker asWorker(Resource resource) throws InstanceNotFoundException {
        return workerDAO.find(resource.getId());
    }

    /** Rebuilds and returns the cached list of DTOs for all work reports. */
    @Override
    @Transactional(readOnly = true)
    public List<WorkReportDTO> getWorkReportDTOs() {
        // load the work reports DTOs
        listWorkReportDTOs.clear();
        for (WorkReport workReport : getAllWorkReports()) {
            WorkReportDTO workReportDTO = new WorkReportDTO(workReport);
            listWorkReportDTOs.add(workReportDTO);
        }
        return listWorkReportDTOs;
    }

    // Filters the DTO list built by the last getWorkReportDTOs() call.
    @Override
    @Transactional(readOnly = true)
    public List<WorkReportDTO> getFilterWorkReportDTOs(IPredicate predicate) {
        List<WorkReportDTO> resultDTOs = new ArrayList<WorkReportDTO>();
        for (WorkReportDTO workReportDTO : listWorkReportDTOs) {
            if (predicate.accepts(workReportDTO)) {
                resultDTOs.add(workReportDTO);
            }
        }
        return resultDTOs;
    }

    private List<WorkReport> getAllWorkReports() {
        List<WorkReport> result = new ArrayList<WorkReport>();
        for (WorkReport each : workReportDAO
                .allWorkReportsWithAssociatedOrdersUnproxied()) {
            // Touch lazy associations so the reports can be used detached.
            each.getWorkReportType().getName();
            if (each.getResource() != null) {
                each.getResource().getShortDescription();
            }
            if (each.getOrderElement() != null) {
                each.getOrderElement().getName();
                each.getOrderElement().getOrder();
            }
            result.add(each);
        }
        return result;
    }

    /** Rebuilds and returns the cached list of all lines across all reports. */
    @Override
    @Transactional(readOnly = true)
    public List<WorkReportLine> getAllWorkReportLines() {
        listWorkReportLine.clear();
        for (WorkReport workReport : getAllWorkReports()) {
            for (WorkReportLine workReportLine : workReport
                    .getWorkReportLines()) {
                forceLoadPrincipalDataWorkReportLines(workReportLine);
                listWorkReportLine.add(workReportLine);
            }
        }
        return listWorkReportLine;
    }

    // Filters the line list built by the last getAllWorkReportLines() call.
    @Override
    public List<WorkReportLine> getFilterWorkReportLines(IPredicate predicate) {
        List<WorkReportLine> result = new ArrayList<WorkReportLine>();
        for (WorkReportLine workReportLine : listWorkReportLine) {
            if (predicate.accepts(workReportLine)) {
                result.add(workReportLine);
            }
        }
        return result;
    }

    @Override
    public boolean isEditing() {
        return editing;
    }

    @Override
    public boolean isListingQuery() {
        return this.listingQuery;
    }

    @Override
    public void setListingQuery(boolean listingQuery) {
        this.listingQuery = listingQuery;
    }

    /**
     * Appends a new, empty line (dated today) to the current report.
     * Returns null when no report is in progress.
     */
    @Override
    public WorkReportLine addWorkReportLine() {
        if (workReport != null) {
            WorkReportLine workReportLine = WorkReportLine.create(workReport);
            workReportLine.setCode("");
            // Adding default date
            workReportLine.setDate(new Date());
            workReport.addWorkReportLine(workReportLine);
            return workReportLine;
        }
        return null;
    }

    @Override
    @Transactional
    public void remove(WorkReport workReport) {
        //before deleting the report, update OrderElement.SumChargedHours
        try {
            workReportDAO.reattach(workReport);
            sumChargedEffortDAO
                    .updateRelatedSumChargedEffortWithDeletedWorkReportLineSet(workReport
                            .getWorkReportLines());
            workReportDAO.remove(workReport.getId());
        } catch (InstanceNotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    // The line is only detached here; the effort aggregates are fixed on save
    // via deletedWorkReportLinesSet.
    @Override
    public void removeWorkReportLine(WorkReportLine workReportLine) {
        deletedWorkReportLinesSet.add(workReportLine);
        workReport.removeWorkReportLine(workReportLine);
    }

    @Override
    public List<WorkReportLine> getWorkReportLines() {
        List<WorkReportLine> result = new ArrayList<WorkReportLine>();
        if (getWorkReport() != null) {
            result.addAll(workReport.getWorkReportLines());
        }
        return result;
    }

    /* Operations to manage the Description Fields and the assigned labels */

    @Override
    public List<Object> getFieldsAndLabelsLineByDefault() {
        if ((getWorkReport() != null)) {
            return sort(getWorkReportType().getLineFieldsAndLabels());
        }
        return new ArrayList<Object>();
    }

    // Heading fields and labels of the current report, sorted by position.
    @Override
    public List<Object> getFieldsAndLabelsHeading() {
        List<Object> result = new ArrayList<Object>();
        if (getWorkReport() != null) {
            result.addAll(getWorkReport().getDescriptionValues());
            result.addAll(getWorkReport().getLabels());
            return sort(result);
        }
        return result;
    }

    // Fields and labels of a single line, sorted by position.
    @Override
    public List<Object> getFieldsAndLabelsLine(WorkReportLine workReportLine) {
        List<Object> result = new ArrayList<Object>();
        if ((getWorkReport() != null) && (workReportLine != null)) {
            result.addAll(workReportLine.getDescriptionValues());
            result.addAll(workReportLine.getLabels());
            return sort(result);
        }
        return result;
    }

    @Override
    public Map<LabelType, List<Label>> getMapAssignedLabelTypes() {
        return this.mapLabelTypes;
    }

    @Override
    public void changeLabelInWorkReportLine(Label oldLabel, Label newLabel,
            WorkReportLine line) {
        if (line != null) {
            line.getLabels().remove(oldLabel);
            line.getLabels().add(newLabel);
        }
    }

    @Override
    @Transactional(readOnly = true)
    public void changeLabelInWorkReport(Label oldLabel, Label newLabel) {
        if (getWorkReport() != null) {
            getWorkReport().getLabels().remove(oldLabel);
            getWorkReport().getLabels().add(newLabel);
        }
    }

    private void loadMaps() {
        loadLabelsByAssignedType();
    }

    // Rebuilds mapLabelTypes: one entry per assigned label type with its labels.
    private void loadLabelsByAssignedType() {
        mapLabelTypes.clear();
        //get the all assigned label types.
        for (LabelType labelType : getAssignedLabelTypes()) {
            List<Label> labels = new ArrayList<Label>(labelDAO
                    .findByType(labelType));
            mapLabelTypes.put(labelType, labels);
        }
    }

    // Linear scan over the type's description fields; null when not found.
    private DescriptionField getDescriptionFieldByName(String name){
        for (DescriptionField descriptionField : getWorkReportType()
                .getDescriptionFields()) {
            if(descriptionField.getFieldName().equals(name)){
                return descriptionField;
            }
        }
        return null;
    }

    // Position configured for the label's type, or null if its type is not
    // assigned to the current work report type.
    private Integer getAssignedLabelIndex(Label label){
        for (WorkReportLabelTypeAssigment labelTypeAssigment : getWorkReportType()
                .getWorkReportLabelTypeAssigments()) {
            if(labelTypeAssigment.getLabelType().equals(label.getType())){
                return labelTypeAssigment.getPositionNumber();
            }
        }
        return null;
    }

    /**
     * Places each element at its configured position (see getIndex()) on top
     * of a copy of the input; elements with a null or out-of-range index stay
     * where they were. NOTE(review): result.set() overwrites, so two elements
     * sharing a position would drop one and duplicate another — presumably
     * positions are unique per type; confirm.
     */
    private List<Object> sort(List<Object> list) {
        List<Object> result = new ArrayList<Object>(list);
        if (list != null) {
            for (Object object : list) {
                Integer index = getIndex(object);
                if ((index != null) && ((index >= 0) && (index < list.size()))) {
                    result.set(getIndex(object), object);
                }
            }
        }
        return result;
    }

    private List<LabelType> getAssignedLabelTypes() {
        List<LabelType> result = new ArrayList<LabelType>();
        for (WorkReportLabelTypeAssigment labelTypeAssigment : getWorkReportType()
                .getWorkReportLabelTypeAssigments()) {
            result.add(labelTypeAssigment.getLabelType());
        }
        return result;
    }

    // Maps an element of the heterogeneous field/label lists to its display
    // position; null for unknown types or unassigned labels.
    private Integer getIndex(Object object) {
        if (object instanceof DescriptionValue) {
            DescriptionField descriptionField = getDescriptionFieldByName(((DescriptionValue) object)
                    .getFieldName());
            return descriptionField.getPositionNumber();
        }
        if (object instanceof Label) {
            return getAssignedLabelIndex((Label) object);
        }
        if (object instanceof DescriptionField) {
            return ((DescriptionField) object).getPositionNumber();
        }
        if (object instanceof WorkReportLabelTypeAssigment) {
            return ((WorkReportLabelTypeAssigment) object).getPositionNumber();
        }
        return null;
    }

    @Override
    public Integer getLength(DescriptionValue descriptionValue) {
        DescriptionField descriptionField = getDescriptionFieldByName(descriptionValue
                .getFieldName());
        return descriptionField.getLength();
    }

    /**
     * Set the selected default work report type to filter the work reports
     */
    // Sentinel label shown in the type filter meaning "no type filter".
    public final String SHOW_ALL_TYPES = _("Show all");

    private final WorkReportType defaultType = WorkReportType.create(
            SHOW_ALL_TYPES, "");

    @Override
    public WorkReportType getDefaultType() {
        return defaultType;
    }

    @Override
    @Transactional(readOnly = true)
    public List<WorkReportType> getWorkReportTypes() {
        List<WorkReportType> result = workReportTypeDAO
                .list(WorkReportType.class);
        return result;
    }

    @Override
    @Transactional(readOnly = true)
    public List<OrderElement> getOrderElements() {
        return orderElementDAO.getAll();
    }

    @Override
    public EntityNameEnum getEntityName() {
        return EntityNameEnum.WORK_REPORT;
    }

    @Override
    public Set<IntegrationEntity> getChildren() {
        return (Set<IntegrationEntity>) (workReport != null ? workReport
                .getWorkReportLines() : new HashSet<IntegrationEntity>());
    }

    @Override
    public IntegrationEntity getCurrentEntity() {
        return this.workReport;
    }

    @Override
    @Transactional(readOnly = true)
    public List<TypeOfWorkHours> getAllHoursType() {
        return typeOfWorkHoursDAO.hoursTypeByNameAsc();
    }

    @Override
    @Transactional(readOnly = true)
    public List<Worker> getBoundWorkers() {
        return workerDAO.getBound();
    }
}
|
package liquibase.diff.output.changelog;
import liquibase.GlobalConfiguration;
import liquibase.Scope;
import liquibase.change.Change;
import liquibase.change.core.*;
import liquibase.changelog.ChangeSet;
import liquibase.configuration.core.DeprecatedConfigurationValueProvider;
import liquibase.database.*;
import liquibase.database.core.*;
import liquibase.diff.DiffResult;
import liquibase.diff.ObjectDifferences;
import liquibase.diff.compare.CompareControl;
import liquibase.diff.output.DiffOutputControl;
import liquibase.exception.DatabaseException;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.executor.Executor;
import liquibase.executor.ExecutorService;
import liquibase.serializer.ChangeLogSerializer;
import liquibase.serializer.ChangeLogSerializerFactory;
import liquibase.snapshot.DatabaseSnapshot;
import liquibase.snapshot.EmptyDatabaseSnapshot;
import liquibase.statement.core.RawSqlStatement;
import liquibase.structure.DatabaseObject;
import liquibase.structure.DatabaseObjectComparator;
import liquibase.structure.core.Column;
import liquibase.structure.core.StoredDatabaseLogic;
import liquibase.util.DependencyUtil;
import liquibase.util.StringUtil;
import javax.xml.parsers.ParserConfigurationException;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.*;
public class DiffToChangeLog {
    public static final String ORDER_ATTRIBUTE = "order";
    public static final String DATABASE_CHANGE_LOG_CLOSING_XML_TAG = "</databaseChangeLog>";
    // Scope key under which the stored-logic output directory is published.
    public static final String EXTERNAL_FILE_DIR_SCOPE_KEY = "DiffToChangeLog.externalFilesDir";
    public static final String DIFF_OUTPUT_CONTROL_SCOPE_KEY = "diffOutputControl";
    // Scope key for the Database chosen by determineDatabase().
    public static final String DIFF_SNAPSHOT_DATABASE = "snapshotDatabase";

    // Millisecond timestamp at construction time; name suggests it seeds
    // generated change set ids — the id generation itself is not in this view.
    private String idRoot = String.valueOf(new Date().getTime());
    // presumably set when a caller supplies an explicit idRoot — confirm
    private boolean overriddenIdRoot;

    private int changeNumber = 1;

    private String changeSetContext;
    private String changeSetAuthor;
    private String changeSetPath;
    private DiffResult diffResult;
    private DiffOutputControl diffOutputControl;
    private boolean tryDbaDependencies = true;

    // NOTE(review): raw type Class; name suggests it tracks types whose output
    // order was already logged, to avoid repeated log lines — confirm at use site.
    private static Set<Class> loggedOrderFor = new HashSet<>();
public DiffToChangeLog(DiffResult diffResult, DiffOutputControl diffOutputControl) {
this.diffResult = diffResult;
this.diffOutputControl = diffOutputControl;
respectSchemaAndCatalogCaseIfNeeded(diffOutputControl);
}
private void respectSchemaAndCatalogCaseIfNeeded(DiffOutputControl diffOutputControl) {
if (this.diffResult.getComparisonSnapshot().getDatabase() instanceof AbstractDb2Database) {
diffOutputControl.setRespectSchemaAndCatalogCase(true);
}
}
public DiffToChangeLog(DiffOutputControl diffOutputControl) {
this.diffOutputControl = diffOutputControl;
}
public void setDiffResult(DiffResult diffResult) {
this.diffResult = diffResult;
}
public void setChangeSetContext(String changeSetContext) {
this.changeSetContext = changeSetContext;
}
public void print(String changeLogFile) throws ParserConfigurationException, IOException, DatabaseException {
this.changeSetPath = changeLogFile;
ChangeLogSerializer changeLogSerializer = ChangeLogSerializerFactory.getInstance().getSerializer(changeLogFile);
this.print(changeLogFile, changeLogSerializer);
}
public void print(PrintStream out) throws ParserConfigurationException, IOException, DatabaseException {
this.print(out, ChangeLogSerializerFactory.getInstance().getSerializer("xml"));
}
    /**
     * Serializes the diff to {@code changeLogFile}. A missing file is created
     * via printNew(); an existing file has the new change sets spliced in
     * before its closing &lt;/databaseChangeLog&gt; tag. Stored-logic objects
     * go to a sibling "objects" directory, which must not already exist.
     */
    public void print(String changeLogFile, ChangeLogSerializer changeLogSerializer) throws ParserConfigurationException, IOException, DatabaseException {
        this.changeSetPath = changeLogFile;
        File file = new File(changeLogFile);
        final Map<String, Object> newScopeObjects = new HashMap<>();
        File objectsDir = null;
        if (changeLogFile.toLowerCase().endsWith("sql")) {
            // SQL output inlines stored logic instead of using an objects dir.
            DeprecatedConfigurationValueProvider.setData("liquibase.pro.sql.inline", "true");
        } else if (this.diffResult.getComparisonSnapshot() instanceof EmptyDatabaseSnapshot) {
            objectsDir = new File(file.getParentFile(), "objects");
        } else {
            // Timestamped name avoids clashing with a previous run's directory.
            objectsDir = new File(file.getParentFile(), "objects-" + new Date().getTime());
        }
        if (objectsDir != null) {
            // Refuse to overwrite existing stored-logic files.
            if (objectsDir.exists()) {
                throw new UnexpectedLiquibaseException("The generatechangelog command would overwrite your existing stored logic files. To run this command please remove or rename the '"+objectsDir.getCanonicalPath()+"' dir in your local project directory");
            }
            newScopeObjects.put(EXTERNAL_FILE_DIR_SCOPE_KEY, objectsDir);
        }
        newScopeObjects.put(DIFF_OUTPUT_CONTROL_SCOPE_KEY, diffOutputControl);
        try {
            // Get a Database instance and save it in the scope for later use
            DatabaseSnapshot snapshot = diffResult.getReferenceSnapshot();
            Database database = determineDatabase(diffResult.getReferenceSnapshot());
            if (database == null) {
                database = determineDatabase(diffResult.getComparisonSnapshot());
            }
            newScopeObjects.put(DIFF_SNAPSHOT_DATABASE, database);
            Scope.child(newScopeObjects, new Scope.ScopedRunner() {
                @Override
                public void run() {
                    try {
                        if (!file.exists()) {
                            //print changeLog only if there are available changeSets to print instead of printing it always
                            printNew(changeLogSerializer, file);
                        } else {
                            Scope.getCurrentScope().getLog(getClass()).info(file + " exists, appending");
                            // Serialize the new change sets to memory first.
                            ByteArrayOutputStream out = new ByteArrayOutputStream();
                            print(new PrintStream(out, true, GlobalConfiguration.OUTPUT_FILE_ENCODING.getCurrentValue()), changeLogSerializer);
                            String xml = new String(out.toByteArray(), GlobalConfiguration.OUTPUT_FILE_ENCODING.getCurrentValue());
                            // Strip the surrounding <databaseChangeLog> element,
                            // keeping only the inner change set markup.
                            String innerXml = xml.replaceFirst("(?ms).*<databaseChangeLog[^>]*>", "");
                            innerXml = innerXml.replaceFirst(DATABASE_CHANGE_LOG_CLOSING_XML_TAG, "");
                            innerXml = innerXml.trim();
                            if ("".equals(innerXml)) {
                                Scope.getCurrentScope().getLog(getClass()).info("No changes found, nothing to do");
                                return;
                            }
                            try (RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw")) {
                                // Scan for the line containing the closing tag,
                                // remembering the offset of the line before it.
                                String line;
                                long offset = 0;
                                boolean foundEndTag = false;
                                while ((line = randomAccessFile.readLine()) != null) {
                                    int index = line.indexOf(DATABASE_CHANGE_LOG_CLOSING_XML_TAG);
                                    if (index >= 0) {
                                        foundEndTag = true;
                                        break;
                                    } else {
                                        offset = randomAccessFile.getFilePointer();
                                    }
                                }
                                String lineSeparator = GlobalConfiguration.OUTPUT_LINE_SEPARATOR.getCurrentValue();
                                if (foundEndTag) {
                                    // Overwrite from the closing tag onwards with
                                    // the new content plus a fresh closing tag.
                                    randomAccessFile.seek(offset);
                                    randomAccessFile.writeBytes("    ");
                                    randomAccessFile.write(innerXml.getBytes(GlobalConfiguration.OUTPUT_FILE_ENCODING.getCurrentValue()));
                                    randomAccessFile.writeBytes(lineSeparator);
                                    randomAccessFile.writeBytes(DATABASE_CHANGE_LOG_CLOSING_XML_TAG + lineSeparator);
                                } else {
                                    // No closing tag found: append the whole
                                    // serialized document at the end of the file.
                                    randomAccessFile.seek(0);
                                    long length = randomAccessFile.length();
                                    randomAccessFile.seek(length);
                                    randomAccessFile.write(xml.getBytes(GlobalConfiguration.OUTPUT_FILE_ENCODING.getCurrentValue()));
                                }
                            }
                        }
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            });
        } catch (Exception e) {
            //rethrow known exceptions. TODO: Fix this up with final Scope API
            final Throwable cause = e.getCause();
            if (cause instanceof ParserConfigurationException) {
                throw (ParserConfigurationException) cause;
            }
            if (cause instanceof IOException) {
                throw (IOException) cause;
            }
            if (cause instanceof DatabaseException) {
                throw (DatabaseException) cause;
            }
            throw new RuntimeException(e);
        }
    }
// Return the Database from this snapshot
// if it is not offline
private Database determineDatabase(DatabaseSnapshot snapshot) {
Database database = snapshot.getDatabase();
DatabaseConnection connection = database.getConnection();
if (! (connection instanceof OfflineConnection) && database instanceof PostgresDatabase) {
return database;
}
return null;
}
/**
* Prints changeLog that would bring the target database to be the same as
* the reference database
*/
public void printNew(ChangeLogSerializer changeLogSerializer, File file) throws ParserConfigurationException, IOException, DatabaseException {
List<ChangeSet> changeSets = generateChangeSets();
Scope.getCurrentScope().getLog(getClass()).info("changeSets count: " + changeSets.size());
if (changeSets.isEmpty()) {
Scope.getCurrentScope().getLog(getClass()).info("No changesets to add.");
} else {
Scope.getCurrentScope().getLog(getClass()).info(file + " does not exist, creating and adding " + changeSets.size() + " changesets.");
}
try (FileOutputStream stream = new FileOutputStream(file);
PrintStream out = new PrintStream(stream, true, GlobalConfiguration.OUTPUT_FILE_ENCODING.getCurrentValue())) {
changeLogSerializer.write(changeSets, out);
}
}
/**
* Prints changeLog that would bring the target database to be the same as
* the reference database
*/
public void print(final PrintStream out, final ChangeLogSerializer changeLogSerializer) throws ParserConfigurationException, IOException, DatabaseException {
List<ChangeSet> changeSets = generateChangeSets();
changeLogSerializer.write(changeSets, out);
out.flush();
}
/**
 * Computes the full list of {@link ChangeSet}s needed to transform the comparison (target)
 * database into the reference database: updates for objects present in both but different,
 * creations for objects missing from the target, and drops for unexpected objects.
 * The returned list is ordered create-sets, then delete-sets, then update-sets.
 */
public List<ChangeSet> generateChangeSets() {
    final ChangeGeneratorFactory changeGeneratorFactory = ChangeGeneratorFactory.getInstance();
    DatabaseObjectComparator comparator = new DatabaseObjectComparator();
    // Optional "created" timestamp stamped onto every generated change set (config-driven).
    String created = null;
    if (GlobalConfiguration.GENERATE_CHANGESET_CREATED_VALUES.getCurrentValue()) {
        created = new SimpleDateFormat("yyyy-MM-dd HH:mmZ").format(new Date());
    }
    List<Class<? extends DatabaseObject>> types = getOrderedOutputTypes(ChangedObjectChangeGenerator.class);
    List<ChangeSet> updateChangeSets = new ArrayList<ChangeSet>();
    // Keep a reference to DiffResult in the comparision database so that it can be retrieved later
    // This is to avoid changing the MissingObjectChangeGenerator API and still be able to pass the
    // initial DiffResult Object which can be used to check for the objects available in the database
    // without doing any expensive db calls. Example usage is in MissingUniqueConstraintChangeGenerator#alreadyExists()
    Database comparisionDatabase = diffResult.getComparisonSnapshot().getDatabase();
    if (comparisionDatabase instanceof AbstractJdbcDatabase) {
        ((AbstractJdbcDatabase) comparisionDatabase).set("diffResult", diffResult);
    }
    // Pass 1: changed objects -> "update" change sets (Liquibase-internal and system objects skipped).
    for (Class<? extends DatabaseObject> type : types) {
        ObjectQuotingStrategy quotingStrategy = diffOutputControl.getObjectQuotingStrategy();
        for (Map.Entry<? extends DatabaseObject, ObjectDifferences> entry : diffResult.getChangedObjects(type, comparator).entrySet()) {
            if (!diffResult.getReferenceSnapshot().getDatabase().isLiquibaseObject(entry.getKey()) && !diffResult.getReferenceSnapshot().getDatabase().isSystemObject(entry.getKey())) {
                Change[] changes = changeGeneratorFactory.fixChanged(entry.getKey(), entry.getValue(), diffOutputControl, diffResult.getReferenceSnapshot().getDatabase(), diffResult.getComparisonSnapshot().getDatabase());
                addToChangeSets(changes, updateChangeSets, quotingStrategy, created);
            }
        }
    }
    // Pass 2: collect missing objects (in type order), then sort them by cross-object
    // dependencies before generating the "create" change sets.
    types = getOrderedOutputTypes(MissingObjectChangeGenerator.class);
    List<DatabaseObject> missingObjects = new ArrayList<DatabaseObject>();
    for (Class<? extends DatabaseObject> type : types) {
        for (DatabaseObject object : diffResult.getMissingObjects(type, getDbObjectComparator())) {
            if (object == null) {
                continue;
            }
            if (!diffResult.getReferenceSnapshot().getDatabase().isLiquibaseObject(object) && !diffResult.getReferenceSnapshot().getDatabase().isSystemObject(object)) {
                missingObjects.add(object);
            }
        }
    }
    List<ChangeSet> createChangeSets = new ArrayList<ChangeSet>();
    for (DatabaseObject object : sortMissingObjects(missingObjects, diffResult.getReferenceSnapshot().getDatabase())) {
        ObjectQuotingStrategy quotingStrategy = diffOutputControl.getObjectQuotingStrategy();
        Change[] changes = changeGeneratorFactory.fixMissing(object, diffOutputControl, diffResult.getReferenceSnapshot().getDatabase(), diffResult.getComparisonSnapshot().getDatabase());
        addToChangeSets(changes, createChangeSets, quotingStrategy, created);
    }
    // Pass 3: unexpected objects -> "delete" change sets, sorted in reverse dependency order.
    List<ChangeSet> deleteChangeSets = new ArrayList<ChangeSet>();
    types = getOrderedOutputTypes(UnexpectedObjectChangeGenerator.class);
    for (Class<? extends DatabaseObject> type : types) {
        ObjectQuotingStrategy quotingStrategy = diffOutputControl.getObjectQuotingStrategy();
        for (DatabaseObject object : sortUnexpectedObjects(diffResult.getUnexpectedObjects(type, comparator), diffResult.getReferenceSnapshot().getDatabase())) {
            if (!diffResult.getComparisonSnapshot().getDatabase().isLiquibaseObject(object) && !diffResult.getComparisonSnapshot().getDatabase().isSystemObject(object)) {
                Change[] changes = changeGeneratorFactory.fixUnexpected(object, diffOutputControl, diffResult.getReferenceSnapshot().getDatabase(), diffResult.getComparisonSnapshot().getDatabase());
                addToChangeSets(changes, deleteChangeSets, quotingStrategy, created);
            }
        }
    }
    // remove the diffResult from the database object
    if (comparisionDatabase instanceof AbstractJdbcDatabase) {
        ((AbstractJdbcDatabase) comparisionDatabase).set("diffResult", null);
    }
    List<ChangeSet> changeSets = new ArrayList<ChangeSet>();
    changeSets.addAll(createChangeSets);
    changeSets.addAll(deleteChangeSets);
    changeSets.addAll(updateChangeSets);
    return changeSets;
}
/**
 * Returns a comparator that orders Columns and stored-logic objects by their snapshot-provided
 * ORDER_ATTRIBUTE when both sides carry one, falling back to the default object comparison.
 */
private DatabaseObjectComparator getDbObjectComparator() {
    return new DatabaseObjectComparator() {
        @Override
        public int compare(DatabaseObject o1, DatabaseObject o2) {
            if (o1 instanceof Column && o1.getAttribute(ORDER_ATTRIBUTE, Integer.class) != null && o2.getAttribute(ORDER_ATTRIBUTE, Integer.class) != null) {
                int i = o1.getAttribute(ORDER_ATTRIBUTE, Integer.class).compareTo(o2.getAttribute(ORDER_ATTRIBUTE, Integer.class));
                if (i != 0) {
                    return i;
                }
            } else if (o1 instanceof StoredDatabaseLogic && o1.getAttribute(ORDER_ATTRIBUTE, Integer.class) != null
                    && o2.getAttribute(ORDER_ATTRIBUTE, Integer.class) != null) {
                // NOTE(review): the null checks above read ORDER_ATTRIBUTE as Integer, but the
                // comparison below reads it as Long — confirm getAttribute converts between the
                // two types, otherwise this branch risks a null/ClassCast problem.
                int order = o1.getAttribute(ORDER_ATTRIBUTE, Long.class).compareTo(o2.getAttribute(ORDER_ATTRIBUTE, Long.class));
                if (order != 0) {
                    return order;
                }
            }
            // Tie (or no order attribute): defer to the standard comparison.
            return super.compare(o1, o2);
        }
    };
}
/**
 * Orders "unexpected" objects (present in the target but not the reference) so that drops
 * happen in reverse dependency order.
 */
private List<DatabaseObject> sortUnexpectedObjects(Collection<? extends DatabaseObject> unexpectedObjects, Database database) {
    @SuppressWarnings("unchecked")
    final Collection<DatabaseObject> candidates = (Collection<DatabaseObject>) unexpectedObjects;
    return sortObjects("unexpected", candidates, database);
}
/**
 * Orders "missing" objects (present in the reference but not the target) so that creations
 * happen in dependency order.
 */
private List<DatabaseObject> sortMissingObjects(Collection<DatabaseObject> missingObjects, Database database) {
    final String sortKind = "missing";
    return sortObjects(sortKind, missingObjects, database);
}
/**
 * Returns {@code objects} ordered by database-reported dependencies when the database supports
 * it and a live (non-offline) connection is available; otherwise returns them in their
 * original iteration order. Columns are never dependency-sorted. For {@code type}
 * {@code "unexpected"} the dependency order is reversed (drop dependents first).
 *
 * @param type     "missing" or "unexpected"; controls the sort direction
 * @param objects  objects to order; not mutated
 * @param database database to ask for dependency information
 */
private List<DatabaseObject> sortObjects(final String type, Collection<DatabaseObject> objects, Database database) {
    if (!objects.isEmpty() && supportsSortingObjects(database) && (database.getConnection() != null) && !(database.getConnection() instanceof OfflineConnection)) {
        // Collect the schemas to query; fall back to the default schema when none configured.
        List<String> schemas = new ArrayList<>();
        CompareControl.SchemaComparison[] schemaComparisons = this.diffOutputControl.getSchemaComparisons();
        if (schemaComparisons != null) {
            for (CompareControl.SchemaComparison comparison : schemaComparisons) {
                String schemaName = comparison.getReferenceSchema().getSchemaName();
                if (schemaName == null) {
                    schemaName = database.getDefaultSchemaName();
                }
                schemas.add(schemaName);
            }
        }
        if (schemas.isEmpty()) {
            schemas.add(database.getDefaultSchemaName());
        }
        try {
            // The listener records "schema.name" keys in topological order as the graph resolves.
            final List<String> dependencyOrder = new ArrayList<>();
            DependencyUtil.NodeValueListener<String> nameListener = new DependencyUtil.NodeValueListener<String>() {
                @Override
                public void evaluating(String nodeValue) {
                    dependencyOrder.add(nodeValue);
                }
            };
            DependencyUtil.DependencyGraph<String> graph = new DependencyUtil.DependencyGraph<String>(nameListener);
            addDependencies(graph, schemas, database);
            graph.computeDependencies();
            if (!dependencyOrder.isEmpty()) {
                // Split into objects the database reported dependencies for (sortable) and the
                // rest (appended unsorted at the end). Columns are always left unsorted.
                final List<DatabaseObject> toSort = new ArrayList<>();
                final List<DatabaseObject> toNotSort = new ArrayList<>();
                for (DatabaseObject obj : objects) {
                    if (!(obj instanceof Column)) {
                        String schemaName = null;
                        if (obj.getSchema() != null) {
                            schemaName = obj.getSchema().getName();
                        }
                        String name = schemaName + "." + obj.getName();
                        if (dependencyOrder.contains(name)) {
                            toSort.add(obj);
                        } else {
                            toNotSort.add(obj);
                        }
                    } else {
                        toNotSort.add(obj);
                    }
                }
                Collections.sort(toSort, new Comparator<DatabaseObject>() {
                    @Override
                    public int compare(DatabaseObject o1, DatabaseObject o2) {
                        String o1Schema = null;
                        if (o1.getSchema() != null) {
                            o1Schema = o1.getSchema().getName();
                        }
                        String o2Schema = null;
                        if (o2.getSchema() != null) {
                            o2Schema = o2.getSchema().getName();
                        }
                        int o1Order = dependencyOrder.indexOf(o1Schema + "." + o1.getName());
                        int o2Order = dependencyOrder.indexOf(o2Schema + "." + o2.getName());
                        // Integer.compare avoids the needless boxing + compareTo of the original.
                        int order = Integer.compare(o1Order, o2Order);
                        // Unexpected objects are dropped, so reverse the creation order.
                        if ("unexpected".equals(type)) {
                            order = order * -1;
                        }
                        return order;
                    }
                });
                toSort.addAll(toNotSort);
                return toSort;
            }
        } catch (DatabaseException e) {
            // Best effort: dependency lookup failure just disables sorting.
            Scope.getCurrentScope().getLog(getClass()).fine("Cannot get object dependencies: " + e.getMessage());
        }
    }
    return new ArrayList<>(objects);
}
/**
 * Queries Oracle's dependency views for object-to-object dependencies within the given schemas.
 * Tries DBA_DEPENDENCIES first (controlled by {@code tryDbaDependencies}); if that view is not
 * visible (ORA-00942) it permanently switches to USER_DEPENDENCIES and retries once.
 *
 * @throws DatabaseException for any database error other than the handled missing-view case
 */
private List<Map<String, ?>> queryForDependenciesOracle(Executor executor, List<String> schemas)
        throws DatabaseException {
    List<Map<String, ?>> rs = null;
    try {
        if (tryDbaDependencies) {
            // DBA-level view: filter by the owning schemas, skip SYS-owned references,
            // recycle-bin objects (BIN$...) and self-references.
            rs = executor.queryForList(new RawSqlStatement("select OWNER, NAME, REFERENCED_OWNER, REFERENCED_NAME from DBA_DEPENDENCIES where REFERENCED_OWNER != 'SYS' AND NOT(NAME LIKE 'BIN$%') AND NOT(OWNER = REFERENCED_OWNER AND NAME = REFERENCED_NAME) AND (" + StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
                @Override
                public String toString(String obj) {
                    return "OWNER='" + obj + "'";
                }
            }
            ) + ")"));
        } else {
            // User-level fallback: no OWNER column, so filter on REFERENCED_OWNER instead.
            rs = executor.queryForList(new RawSqlStatement("select NAME, REFERENCED_OWNER, REFERENCED_NAME from USER_DEPENDENCIES where REFERENCED_OWNER != 'SYS' AND NOT(NAME LIKE 'BIN$%') AND NOT(NAME = REFERENCED_NAME) AND (" + StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
                @Override
                public String toString(String obj) {
                    return "REFERENCED_OWNER='" + obj + "'";
                }
            }
            ) + ")"));
        }
    } catch (DatabaseException dbe) {
        // If our exception is for something other than a missing table/view
        // then we just re-throw the exception
        // else if we can't see USER_DEPENDENCIES then we also re-throw
        // to stop the recursion
        String message = dbe.getMessage();
        if (!message.contains("ORA-00942: table or view does not exist")) {
            throw new DatabaseException(dbe);
        } else if (!tryDbaDependencies) {
            throw new DatabaseException(dbe);
        }
        Scope.getCurrentScope().getLog(getClass()).warning("Unable to query DBA_DEPENDENCIES table. Switching to USER_DEPENDENCIES");
        tryDbaDependencies = false;
        // Retry once with the user-level view (the flag above prevents infinite recursion).
        return queryForDependenciesOracle(executor, schemas);
    }
    return rs;
}
/**
 * Used by {@link #sortMissingObjects(Collection, Database)} to decide whether the
 * dependency-based sorting logic applies to the given database platform.
 */
protected boolean supportsSortingObjects(Database database) {
    return database instanceof AbstractDb2Database
            || database instanceof MSSQLDatabase
            || database instanceof OracleDatabase
            || database instanceof PostgresDatabase;
}
/**
 * Adds dependencies to the graph as schema.object_name.
 * Each supported platform (DB2 LUW, DB2 z/OS, Oracle, MSSQL, PostgreSQL) is queried with its
 * own catalog SQL; edges are added as (depended-on, dependent) pairs.
 */
protected void addDependencies(DependencyUtil.DependencyGraph<String> graph, List<String> schemas, Database database) throws DatabaseException {
    if (database instanceof DB2Database) {
        // DB2 LUW: syscat.tabdep lists table-level dependencies per schema.
        Executor executor = Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", database);
        List<Map<String, ?>> rs = executor.queryForList(new RawSqlStatement("select TABSCHEMA, TABNAME, BSCHEMA, BNAME from syscat.tabdep where (" + StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
            @Override
            public String toString(String obj) {
                return "TABSCHEMA='" + obj + "'";
            }
        }
        ) + ")"));
        for (Map<String, ?> row : rs) {
            String tabName = StringUtil.trimToNull((String) row.get("TABSCHEMA")) + "." + StringUtil.trimToNull((String) row.get("TABNAME"));
            String bName = StringUtil.trimToNull((String) row.get("BSCHEMA")) + "." + StringUtil.trimToNull((String) row.get("BNAME"));
            // Edge: base object (BSCHEMA.BNAME) must come before the dependent table.
            graph.add(bName, tabName);
        }
    } else if (database instanceof Db2zDatabase) {
        // DB2 for z/OS: same shape, different catalog (SYSIBM.SYSDEPENDENCIES).
        Executor executor = Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", database);
        String db2ZosSql = "SELECT DSCHEMA AS TABSCHEMA, DNAME AS TABNAME, BSCHEMA, BNAME FROM SYSIBM.SYSDEPENDENCIES WHERE (" + StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
            @Override
            public String toString(String obj) {
                return "DSCHEMA='" + obj + "'";
            }
        }
        ) + ")";
        List<Map<String, ?>> rs = executor.queryForList(new RawSqlStatement(db2ZosSql));
        for (Map<String, ?> row : rs) {
            String tabName = StringUtil.trimToNull((String) row.get("TABSCHEMA")) + "." + StringUtil.trimToNull((String) row.get("TABNAME"));
            String bName = StringUtil.trimToNull((String) row.get("BSCHEMA")) + "." + StringUtil.trimToNull((String) row.get("BNAME"));
            graph.add(bName, tabName);
        }
    } else if (database instanceof OracleDatabase) {
        // Oracle: delegates to queryForDependenciesOracle (DBA_/USER_DEPENDENCIES fallback).
        Executor executor = Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", database);
        List<Map<String, ?>> rs = queryForDependenciesOracle(executor, schemas);
        for (Map<String, ?> row : rs) {
            String tabName = null;
            if (tryDbaDependencies) {
                tabName =
                        StringUtil.trimToNull((String) row.get("OWNER")) + "." +
                                StringUtil.trimToNull((String) row.get("NAME"));
            } else {
                // NOTE(review): the USER_DEPENDENCIES result has no OWNER column, so the
                // dependent is keyed with REFERENCED_OWNER here — confirm this pairing is intended.
                tabName =
                        StringUtil.trimToNull((String) row.get("REFERENCED_OWNER")) + "." +
                                StringUtil.trimToNull((String) row.get("NAME"));
            }
            String bName =
                    StringUtil.trimToNull((String) row.get("REFERENCED_OWNER")) + "." +
                            StringUtil.trimToNull((String) row.get("REFERENCED_NAME"));
            graph.add(bName, tabName);
        }
    } else if (database instanceof MSSQLDatabase) {
        // MSSQL: a large UNION of catalog queries covering expression dependencies,
        // parent objects, foreign keys, partition schemes/functions, filegroups,
        // filestream/LOB data spaces, indexes and synonyms.
        Executor executor = Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", database);
        String sql = "select object_schema_name(referencing_id) as referencing_schema_name, object_name(referencing_id) as referencing_name, object_name(referenced_id) as referenced_name, object_schema_name(referenced_id) as referenced_schema_name from sys.sql_expression_dependencies depz where (" + StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
            @Override
            public String toString(String obj) {
                return "object_schema_name(referenced_id)='" + obj + "'";
            }
        }
        ) + ")";
        sql += " UNION select object_schema_name(object_id) as referencing_schema_name, object_name(object_id) as referencing_name, object_name(parent_object_id) as referenced_name, object_schema_name(parent_object_id) as referenced_schema_name " +
                "from sys.objects " +
                "where parent_object_id > 0 " +
                "and is_ms_shipped=0 " +
                "and (" + StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
            @Override
            public String toString(String obj) {
                return "object_schema_name(object_id)='" + obj + "'";
            }
        }
        ) + ")";
        sql += " UNION select object_schema_name(fk.object_id) as referencing_schema_name, fk.name as referencing_name, i.name as referenced_name, object_schema_name(i.object_id) as referenced_schema_name " +
                "from sys.foreign_keys fk " +
                "join sys.indexes i on fk.referenced_object_id=i.object_id and fk.key_index_id=i.index_id " +
                "where fk.is_ms_shipped=0 " +
                "and (" + StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
            @Override
            public String toString(String obj) {
                return "object_schema_name(fk.object_id)='" + obj + "'";
            }
        }
        ) + ")";
        sql += " UNION select object_schema_name(i.object_id) as referencing_schema_name, object_name(i.object_id) as referencing_name, s.name as referenced_name, null as referenced_schema_name " +
                "from sys.indexes i " +
                "join sys.partition_schemes s on i.data_space_id = s.data_space_id";
        sql += " UNION select null as referencing_schema_name, s.name as referencing_name, f.name as referenced_name, null as referenced_schema_name from sys.partition_functions f " +
                "join sys.partition_schemes s on s.function_id=f.function_id";
        sql += " UNION select null as referencing_schema_name, s.name as referencing_name, fg.name as referenced_name, null as referenced_schema_name from sys.partition_schemes s " +
                "join sys.destination_data_spaces ds on s.data_space_id=ds.partition_scheme_id " +
                "join sys.filegroups fg on ds.data_space_id=fg.data_space_id";
        //get data file -> filegroup dependencies
        sql += " UNION select distinct null as referencing_schema_name, f.name as referencing_name, ds.name as referenced_name, null as referenced_schema_name from sys.database_files f " +
                "join sys.data_spaces ds on f.data_space_id=ds.data_space_id " +
                "where f.data_space_id > 1";
        //get table -> filestream dependencies
        sql += " UNION select object_schema_name(t.object_id) as referencing_schema_name, t.name as referencing_name, ds.name as referenced_name, null as referenced_schema_name from sys.tables t " +
                "join sys.data_spaces ds on t.filestream_data_space_id=ds.data_space_id " +
                "where t.filestream_data_space_id > 1";
        //get table -> filestream dependencies
        sql += " UNION select object_schema_name(t.object_id) as referencing_schema_name, t.name as referencing_name, ds.name as referenced_name, null as referenced_schema_name from sys.tables t " +
                "join sys.data_spaces ds on t.lob_data_space_id=ds.data_space_id " +
                "where t.lob_data_space_id > 1";
        //get index -> filegroup dependencies
        sql += " UNION select object_schema_name(i.object_id) as referencing_schema_name, i.name as referencing_name, ds.name as referenced_name, null as referenced_schema_name from sys.indexes i " +
                "join sys.data_spaces ds on i.data_space_id=ds.data_space_id " +
                "where i.data_space_id > 1";
        //get index -> table dependencies
        sql += " UNION select object_schema_name(i.object_id) as referencing_schema_name, i.name as referencing_name, object_name(i.object_id) as referenced_name, object_schema_name(i.object_id) as referenced_schema_name from sys.indexes i " +
                "where " + StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
            @Override
            public String toString(String obj) {
                return "object_schema_name(i.object_id)='" + obj + "'";
            }
        });
        //get schema -> base object dependencies
        sql += " UNION SELECT SCHEMA_NAME(SCHEMA_ID) as referencing_schema_name, name as referencing_name, PARSENAME(BASE_OBJECT_NAME,1) AS referenced_name, (CASE WHEN PARSENAME(BASE_OBJECT_NAME,2) IS NULL THEN schema_name(schema_id) else PARSENAME(BASE_OBJECT_NAME,2) END) AS referenced_schema_name FROM sys.synonyms WHERE is_ms_shipped='false' AND " + StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
            @Override
            public String toString(String obj) {
                return "SCHEMA_NAME(SCHEMA_ID)='" + obj + "'";
            }
        });
        //get non-clustered indexes -> unique clustered indexes on views dependencies
        // NOTE(review): this clause is hard-coded to a single view name ('AR_DETAIL_OPEN'),
        // which looks like leftover debugging/customer-specific code — confirm whether the
        // o.name filter should be removed or generalized.
        sql += " UNION select object_schema_name(c.object_id) as referencing_schema_name, c.name as referencing_name, object_schema_name(nc.object_id) as referenced_schema_name, nc.name as referenced_name from sys.indexes c join sys.indexes nc on c.object_id=nc.object_id JOIN sys.objects o ON c.object_id = o.object_id where c.index_id != nc.index_id and c.type_desc='CLUSTERED' and c.is_unique='true' and (not(nc.type_desc='CLUSTERED') OR nc.is_unique='false') AND o.type_desc='VIEW' AND o.name='AR_DETAIL_OPEN'";
        List<Map<String, ?>> rs = executor.queryForList(new RawSqlStatement(sql));
        if (!rs.isEmpty()) {
            for (Map<String, ?> row : rs) {
                String bName = StringUtil.trimToNull((String) row.get("REFERENCED_SCHEMA_NAME")) + "." + StringUtil.trimToNull((String) row.get("REFERENCED_NAME"));
                String tabName = StringUtil.trimToNull((String) row.get("REFERENCING_SCHEMA_NAME")) + "." + StringUtil.trimToNull((String) row.get("REFERENCING_NAME"));
                // Self-edges would create trivial cycles; skip them.
                if (!bName.equals(tabName)) {
                    graph.add(bName, tabName);
                }
            }
        }
    } else if (database instanceof PostgresDatabase) {
        // PostgreSQL: recursive pg_depend query; see queryForDependenciesPostgreSql.
        final String sql = queryForDependenciesPostgreSql(schemas);
        final Executor executor = Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", database);
        final List<Map<String, ?>> queryForListResult = executor.queryForList(new RawSqlStatement(sql));
        for (Map<String, ?> row : queryForListResult) {
            // NOTE(review): here bName is built from the REFERENCING_* columns and tabName from
            // REFERENCED_* — inverted relative to the DB2/MSSQL branches above. Confirm the
            // Postgres query's column aliases compensate for this orientation.
            String bName = StringUtil.trimToEmpty((String) row.get("REFERENCING_SCHEMA_NAME")) +
                    "." + StringUtil.trimToEmpty((String)row.get("REFERENCING_NAME"));
            String tabName = StringUtil.trimToEmpty((String)row.get("REFERENCED_SCHEMA_NAME")) +
                    "." + StringUtil.trimToEmpty((String)row.get("REFERENCED_NAME"));
            if (!(tabName.isEmpty() || bName.isEmpty())) {
                // Add both the raw name and a variant with any parenthesized suffix stripped
                // (e.g. function signatures), quotes removed in both cases.
                graph.add(bName.replace("\"", ""), tabName.replace("\"", ""));
                graph.add(bName.replace("\"", "").replaceAll("\\s*\\([^)]*\\)\\s*",""),
                        tabName.replace("\"", "").replaceAll("\\s*\\([^)]*\\)\\s*", ""));
            }
        }
    }
}
/**
 * Builds the PostgreSQL dependency query: a recursive CTE over pg_depend that resolves each
 * (classid, objid) / (refclassid, refobjid) pair to human-readable schema-qualified names,
 * restricted to user objects (oid >= 16384), excluding system/utility schemas and
 * primary-key constraint edges, and filtered to the supplied schemas.
 */
private String queryForDependenciesPostgreSql(List<String> schemas) {
    return "WITH RECURSIVE preference AS (\n" +
            "  SELECT 10 AS max_depth  -- The deeper the recursion goes, the slower it performs.\n" +
            "    , 16384 AS min_oid -- user objects only\n" +
            "    , '^(londiste|pgq|pg_toast)'::text AS schema_exclusion\n" +
            "    , '^pg_(conversion|language|ts_(dict|template))'::text AS class_exclusion\n" +
            "    , '{\"SCHEMA\":\"00\", \"TABLE\":\"01\", \"CONSTRAINT\":\"02\", \"DEFAULT\":\"03\",\n" +
            "      \"INDEX\":\"05\", \"SEQUENCE\":\"06\", \"TRIGGER\":\"07\", \"FUNCTION\":\"08\",\n" +
            "      \"VIEW\":\"10\", \"MVIEW\":\"11\", \"FOREIGN\":\"12\"}'::json AS type_ranks),\n" +
            "  dependency_pair AS (\n" +
            "    WITH relation_object AS ( SELECT oid, oid::regclass::text AS object_name  FROM pg_class )\n" +
            "    SELECT DISTINCT " +
            "      substring(pg_identify_object(classid, objid, 0)::text, E'(\\\\w+?)\\\\.') as referenced_schema_name, " +
            "      CASE classid\n" +
            "        WHEN 'pg_constraint'::regclass THEN (SELECT CONTYPE FROM pg_constraint WHERE oid = objid)\n" +
            "        ELSE objid::text\n" +
            "        END AS CONTYPE,\n" +
            "      CASE classid\n" +
            "        WHEN 'pg_attrdef'::regclass THEN (SELECT attname FROM pg_attrdef d JOIN pg_attribute c ON (c.attrelid,c.attnum)=(d.adrelid,d.adnum) WHERE d.oid = objid)\n" +
            "        WHEN 'pg_cast'::regclass THEN (SELECT concat(castsource::regtype::text, ' AS ', casttarget::regtype::text,' WITH ', castfunc::regprocedure::text) FROM pg_cast WHERE oid = objid)\n" +
            "        WHEN 'pg_class'::regclass THEN rel.object_name\n" +
            "        WHEN 'pg_constraint'::regclass THEN (SELECT conname FROM pg_constraint WHERE oid = objid)\n" +
            "        WHEN 'pg_extension'::regclass THEN (SELECT extname FROM pg_extension WHERE oid = objid)\n" +
            "        WHEN 'pg_namespace'::regclass THEN (SELECT nspname FROM pg_namespace WHERE oid = objid)\n" +
            "        WHEN 'pg_opclass'::regclass THEN (SELECT opcname FROM pg_opclass WHERE oid = objid)\n" +
            "        WHEN 'pg_operator'::regclass THEN (SELECT oprname FROM pg_operator WHERE oid = objid)\n" +
            "        WHEN 'pg_opfamily'::regclass THEN (SELECT opfname FROM pg_opfamily WHERE oid = objid)\n" +
            "        WHEN 'pg_proc'::regclass THEN objid::regprocedure::text\n" +
            "        WHEN 'pg_rewrite'::regclass THEN (SELECT ev_class::regclass::text FROM pg_rewrite WHERE oid = objid)\n" +
            "        WHEN 'pg_trigger'::regclass THEN (SELECT tgname FROM pg_trigger WHERE oid = objid)\n" +
            "        WHEN 'pg_type'::regclass THEN objid::regtype::text\n" +
            "        ELSE objid::text\n" +
            "        END AS REFERENCED_NAME,\n" +
            "      substring(pg_identify_object(refclassid, refobjid, 0)::text, E'(\\\\w+?)\\\\.') as referencing_schema_name, " +
            "      CASE refclassid\n" +
            "        WHEN 'pg_namespace'::regclass THEN (SELECT nspname FROM pg_namespace WHERE oid = refobjid)\n" +
            "        WHEN 'pg_class'::regclass THEN rrel.object_name\n" +
            "        WHEN 'pg_opfamily'::regclass THEN (SELECT opfname FROM pg_opfamily WHERE oid = refobjid)\n" +
            "        WHEN 'pg_proc'::regclass THEN refobjid::regprocedure::text\n" +
            "        WHEN 'pg_type'::regclass THEN refobjid::regtype::text\n" +
            "        ELSE refobjid::text\n" +
            "        END AS REFERENCING_NAME\n" +
            "      FROM pg_depend dep\n" +
            "      LEFT JOIN relation_object rel ON rel.oid = dep.objid\n" +
            "      LEFT JOIN relation_object rrel ON rrel.oid = dep.refobjid, preference\n" +
            "      WHERE deptype = ANY('{n,a}')\n" +
            "      AND objid >= preference.min_oid\n" +
            "      AND (refobjid >= preference.min_oid OR refobjid = 2200) -- need public schema as root node\n" +
            "      AND classid::regclass::text !~ preference.class_exclusion\n" +
            "      AND refclassid::regclass::text !~ preference.class_exclusion\n" +
            "      AND COALESCE(SUBSTRING(objid::regclass::text, E'^(\\\\\\\\w+)\\\\\\\\.'),'') !~ preference.schema_exclusion\n" +
            "      AND COALESCE(SUBSTRING(refobjid::regclass::text, E'^(\\\\\\\\w+)\\\\\\\\.'),'') !~ preference.schema_exclusion\n" +
            "      GROUP BY classid, objid, refclassid, refobjid, deptype, rel.object_name, rrel.object_name\n" +
            "  )\n" +
            "  select referenced_schema_name,\n" +
            "    (CASE\n" +
            "      WHEN position('.' in referenced_name) >0 THEN substring(referenced_name from position('.' in referenced_name)+1 for length(referenced_name))\n" +
            "      ELSE referenced_name\n" +
            "    END)  AS referenced_name, \n" +
            "    referencing_schema_name,\n" +
            "    (CASE\n" +
            "      WHEN position('.' in referencing_name) >0 THEN substring(referencing_name from position('.' in referencing_name)+1 for length(referencing_name))\n" +
            "      ELSE referencing_name\n" +
            "    END)  AS referencing_name from dependency_pair where REFERENCED_NAME != REFERENCING_NAME " +
            "  AND (" +
            // Restrict to objects in the requested schemas (unqualified names always pass).
            StringUtil.join(schemas, " OR ", new StringUtil.StringUtilFormatter<String>() {
                @Override
                public String toString(String obj) {
                    return " REFERENCED_NAME like '" + obj + ".%' OR REFERENCED_NAME NOT LIKE '%.%'";
                }
            }) + ")\n" +
            "  AND CONTYPE::text != 'p'\n" +
            "  AND referencing_schema_name is not null and referencing_name is not null";
}
/**
 * Returns the snapshot's included {@link DatabaseObject} types ordered so that the given
 * change generator can process them without violating before/after constraints.
 * The computed order is logged (at fine level) once per generator type.
 *
 * @param generatorType the change generator whose type-ordering constraints apply
 */
protected List<Class<? extends DatabaseObject>> getOrderedOutputTypes(Class<? extends ChangeGenerator> generatorType) {
    Database comparisonDatabase = diffResult.getComparisonSnapshot().getDatabase();
    DependencyGraph graph = new DependencyGraph();
    for (Class<? extends DatabaseObject> type : diffResult.getReferenceSnapshot().getSnapshotControl().getTypesToInclude()) {
        graph.addType(type);
    }
    List<Class<? extends DatabaseObject>> types = graph.sort(comparisonDatabase, generatorType);
    if (!loggedOrderFor.contains(generatorType)) {
        // Build the log line with StringBuilder instead of repeated String concatenation in a loop.
        StringBuilder log = new StringBuilder(generatorType.getSimpleName()).append(" type order: ");
        for (Class<? extends DatabaseObject> type : types) {
            log.append(" ").append(type.getName());
        }
        Scope.getCurrentScope().getLog(getClass()).fine(log.toString());
        loggedOrderFor.add(generatorType);
    }
    return types;
}
/**
 * Wraps the given changes into one or more {@link ChangeSet}s and appends them to
 * {@code changeSets}. The change set context comes from {@code diffOutputControl} when set
 * (with surrounding parentheses stripped), otherwise from the default field; labels, author,
 * path, id and quoting strategy come from this generator's configuration.
 *
 * @param changes         changes to wrap; ignored when {@code null}
 * @param changeSets      destination list, mutated in place
 * @param quotingStrategy quoting strategy recorded on each generated change set
 * @param created         optional "created" timestamp, may be {@code null}
 */
private void addToChangeSets(Change[] changes, List<ChangeSet> changeSets, ObjectQuotingStrategy quotingStrategy, String created) {
    if (changes == null) {
        return;
    }
    // Resolve the effective context once, for both branches below.
    String csContext = this.changeSetContext;
    if (diffOutputControl.getContext() != null) {
        csContext = diffOutputControl.getContext().toString().replaceFirst("^\\(", "")
                .replaceFirst("\\)$", "");
    }
    if (useSeparateChangeSets(changes)) {
        for (Change change : changes) {
            // BUGFIX: this branch previously passed the raw changeSetContext field, silently
            // dropping a context configured on diffOutputControl; it now uses csContext like
            // the merged-changeset branch below.
            ChangeSet changeSet = new ChangeSet(generateId(changes), getChangeSetAuthor(), false, false, this.changeSetPath, csContext,
                    null, false, quotingStrategy, null);
            changeSet.setCreated(created);
            if (diffOutputControl.getLabels() != null) {
                changeSet.setLabels(diffOutputControl.getLabels());
            }
            changeSet.addChange(change);
            changeSets.add(changeSet);
        }
    } else {
        // All changes share one change set.
        ChangeSet changeSet = new ChangeSet(generateId(changes), getChangeSetAuthor(), false, false, this.changeSetPath, csContext,
                null, false, quotingStrategy, null);
        changeSet.setCreated(created);
        if (diffOutputControl.getLabels() != null) {
            changeSet.setLabels(diffOutputControl.getLabels());
        }
        for (Change change : changes) {
            changeSet.addChange(change);
        }
        changeSets.add(changeSet);
    }
}
/**
 * Decides whether the given changes must each go into their own change set: returns
 * {@code true} once a second auto-committing change is encountered. Data manipulation
 * changes (insert/update/delete/load) and raw {@code SET X Y} statements are treated as
 * non-auto-committing and never force a split on their own.
 */
protected boolean useSeparateChangeSets(Change[] changes) {
    int autoCommitCount = 0;
    for (Change change : changes) {
        boolean autoCommits = !(change instanceof InsertDataChange
                || change instanceof DeleteDataChange
                || change instanceof UpdateDataChange
                || change instanceof LoadDataChange);
        if (change instanceof RawSQLChange
                && ((RawSQLChange) change).getSql().trim().matches("SET\\s+\\w+\\s+\\w+")) {
            //don't separate out when there is a `SET X Y` statement
            autoCommits = false;
        }
        if (autoCommits) {
            autoCommitCount++;
            if (autoCommitCount > 1) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Returns the author to record on generated change sets: the explicitly configured author
 * when present, otherwise the OS user name suffixed with " (generated)", or
 * "diff-generated" when no usable user name is available.
 */
protected String getChangeSetAuthor() {
    if (changeSetAuthor != null) {
        return changeSetAuthor;
    }
    final String author = System.getProperty("user.name");
    // Note: the original (untrimmed) value is returned when non-blank.
    return (StringUtil.trimToNull(author) == null) ? "diff-generated" : author + " (generated)";
}
/**
 * Sets the author recorded on generated change sets; pass {@code null} to fall back to the
 * OS-user-derived default (see {@link #getChangeSetAuthor()}).
 */
public void setChangeSetAuthor(String changeSetAuthor) {
    this.changeSetAuthor = changeSetAuthor;
}
/** Returns the logical file path recorded on generated change sets. */
public String getChangeSetPath() {
    return changeSetPath;
}
/** Sets the logical file path recorded on generated change sets. */
public void setChangeSetPath(String changeSetPath) {
    this.changeSetPath = changeSetPath;
}
/**
 * Sets the root used for generated change set ids and marks it as externally supplied so
 * {@link #generateId(Change[])} will not rewrite it.
 */
public void setIdRoot(String idRoot) {
    this.idRoot = idRoot;
    this.overriddenIdRoot = true;
}
/**
 * Generates a change set id of the form {@code <idRoot>-<changeNumber>[ (description)]}.
 * Increments {@link #changeNumber} on every call; when description-bearing ids are enabled
 * and the id root was not supplied externally, the first call rewrites {@link #idRoot} to a
 * short base-36 suffix.
 */
protected String generateId(Change[] changes) {
    String desc = "";
    if (GlobalConfiguration.GENERATED_CHANGESET_IDS_INCLUDE_DESCRIPTION.getCurrentValue()) {
        if (!overriddenIdRoot) { //switch timestamp to a shorter string (last 4 digits in base 36 format). Still mostly unique, but shorter since we also now have mostly-unique descriptions of the changes
            // NOTE(review): assumes idRoot parses via Long.decode and that its base-36 form has
            // at least 4 characters — otherwise this throws; confirm how idRoot is initialized.
            this.idRoot = Long.toString(Long.decode(idRoot), 36);
            idRoot = idRoot.substring(idRoot.length() - 4);
            this.overriddenIdRoot = true;
        }
        if ((changes != null) && (changes.length > 0)) {
            desc = " ("+ StringUtil.join(changes, " :: ", new StringUtil.StringUtilFormatter<Change>() {
                @Override
                public String toString(Change obj) {
                    return obj.getDescription();
                }
            }) + ")";
        }
        // Cap the description so generated ids stay reasonably short.
        if (desc.length() > 150) {
            desc = desc.substring(0, 146) + "...)";
        }
    }
    return idRoot + "-" + changeNumber++ + desc;
}
/**
 * Topological sorter for {@link DatabaseObject} types. Edges express "type A must be handled
 * before type B" constraints obtained from the {@link ChangeGeneratorFactory};
 * {@link #sort(Database, Class)} resolves them with Kahn's algorithm.
 */
private static class DependencyGraph {
    // All known nodes, keyed by the DatabaseObject type they represent.
    private Map<Class<? extends DatabaseObject>, Node> allNodes = new HashMap<>();
    // Registers a type as a node with no edges yet.
    private void addType(Class<? extends DatabaseObject> type) {
        allNodes.put(type, new Node(type));
    }
    /**
     * Returns the registered types in dependency order for the given generator.
     * Note: the sort consumes edges destructively, so each graph instance is single-use;
     * a remaining incoming edge afterwards indicates a cycle and raises
     * {@link UnexpectedLiquibaseException}.
     */
    public List<Class<? extends DatabaseObject>> sort(Database database, Class<? extends ChangeGenerator> generatorType) {
        Map<Class<? extends DatabaseObject>, Node> newNodes = new HashMap<>();
        ChangeGeneratorFactory changeGeneratorFactory = ChangeGeneratorFactory.getInstance();
        for (Class<? extends DatabaseObject> type : allNodes.keySet()) {
            // For both run* types
            // make sure that if the Node does not exist
            // it gets created and saved in the newNodes map
            for (Class<? extends DatabaseObject> afterType : changeGeneratorFactory.runBeforeTypes(type, database, generatorType)) {
                Node typeNode = retrieveOrCreateNode(newNodes, type);
                Node afterTypeNode = retrieveOrCreateNode(newNodes, afterType);
                typeNode.addEdge(afterTypeNode);
            }
            for (Class<? extends DatabaseObject> beforeType : changeGeneratorFactory.runAfterTypes(type, database, generatorType)) {
                Node beforeTypeNode = retrieveOrCreateNode(newNodes, beforeType);
                Node typeNode = retrieveOrCreateNode(newNodes, type);
                beforeTypeNode.addEdge(typeNode);
            }
        }
        // Add any newly created Node objects to the allNodes map
        for (Node newNode : newNodes.values()) {
            if (! allNodes.containsKey(newNode.type)) {
                allNodes.put(newNode.type, newNode);
            }
        }
        ArrayList<Node> returnNodes = new ArrayList<>();
        // Sorted by class name so ties resolve deterministically across runs.
        SortedSet<Node> nodesWithNoIncomingEdges = new TreeSet<>(new Comparator<Node>() {
            @Override
            public int compare(Node o1, Node o2) {
                return o1.type.getName().compareTo(o2.type.getName());
            }
        });
        for (Node n : allNodes.values()) {
            if (n.inEdges.isEmpty()) {
                nodesWithNoIncomingEdges.add(n);
            }
        }
        // Kahn's algorithm: repeatedly emit a node with no incoming edges and retire its
        // outgoing edges, releasing any nodes that become edge-free.
        while (!nodesWithNoIncomingEdges.isEmpty()) {
            Node node = nodesWithNoIncomingEdges.iterator().next();
            nodesWithNoIncomingEdges.remove(node);
            returnNodes.add(node);
            for (Iterator<Edge> it = node.outEdges.iterator(); it.hasNext(); ) {
                //remove edge e from the graph
                Edge edge = it.next();
                Node nodePointedTo = edge.to;
                it.remove();//Remove edge from node
                nodePointedTo.inEdges.remove(edge);//Remove edge from nodePointedTo
                //if nodePointedTo has no other incoming edges then insert nodePointedTo into nodesWithNoIncomingEdges
                if (nodePointedTo.inEdges.isEmpty()) {
                    nodesWithNoIncomingEdges.add(nodePointedTo);
                }
            }
        }
        checkForCycleInDependencies(generatorType);
        List<Class<? extends DatabaseObject>> returnList = new ArrayList<>();
        for (Node node : returnNodes) {
            returnList.add(node.type);
        }
        return returnList;
    }
    // If the Node for this type already exists then return it
    // else look in the newNodes map for one
    // else create a new Node and put it in the newNodes map
    private Node retrieveOrCreateNode(Map<Class<? extends DatabaseObject>, Node> newNodes, Class<? extends DatabaseObject> type) {
        Node node;
        if (allNodes.containsKey(type)) {
            node = allNodes.get(type);
        } else if (newNodes.containsKey(type)) {
            node = newNodes.get(type);
        }
        else {
            node = new Node(type);
            newNodes.put(type, node);
        }
        return node;
    }
    /**
     * Throws {@link UnexpectedLiquibaseException} with a full edge dump if any node still has
     * incoming edges after the sort (i.e. the constraints contain a cycle).
     */
    private void checkForCycleInDependencies(Class<? extends ChangeGenerator> generatorType) {
        //Check to see if all edges are removed
        for (Node n : allNodes.values()) {
            if (!n.inEdges.isEmpty()) {
                String message = "Could not resolve " + generatorType.getSimpleName() + " dependencies due " +
                        "to dependency cycle. Dependencies: \n";
                for (Node node : allNodes.values()) {
                    SortedSet<String> fromTypes = new TreeSet<>();
                    SortedSet<String> toTypes = new TreeSet<>();
                    for (Edge edge : node.inEdges) {
                        fromTypes.add(edge.from.type.getSimpleName());
                    }
                    for (Edge edge : node.outEdges) {
                        toTypes.add(edge.to.type.getSimpleName());
                    }
                    String from = StringUtil.join(fromTypes, ",");
                    String to = StringUtil.join(toTypes, ",");
                    message += "    [" + from + "] -> " + node.type.getSimpleName() + " -> [" + to + "]\n";
                }
                throw new UnexpectedLiquibaseException(message);
            }
        }
    }
    // NOTE(review): no caller of this accessor is visible in this file's shown portion, and the
    // freshly created fallback node is never registered in allNodes — confirm whether this is
    // dead code or used by the enclosing class.
    private Node getNode(Class<? extends DatabaseObject> type) {
        Node node = allNodes.get(type);
        if (node == null) {
            node = new Node(type);
        }
        return node;
    }
    /** A graph vertex for one DatabaseObject type, with mutable in/out edge sets. */
    static class Node {
        public final Class<? extends DatabaseObject> type;
        public final HashSet<Edge> inEdges;
        public final HashSet<Edge> outEdges;
        public Node(Class<? extends DatabaseObject> type) {
            this.type = type;
            inEdges = new HashSet<>();
            outEdges = new HashSet<>();
        }
        // Adds a directed edge this -> node, registering it on both endpoints.
        public Node addEdge(Node node) {
            Edge e = new Edge(this, node);
            outEdges.add(e);
            node.inEdges.add(e);
            return this;
        }
        @Override
        public String toString() {
            return type.getName();
        }
    }
    /** A directed edge between two Nodes; equality is by endpoint identity. */
    static class Edge {
        public final Node from;
        public final Node to;
        public Edge(Node from, Node to) {
            this.from = from;
            this.to = to;
        }
        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (!(obj instanceof Edge)) {
                return false;
            }
            Edge e = (Edge) obj;
            // Identity comparison on the endpoints (equal edges share the same Node instances).
            return (e.from == from) && (e.to == to);
        }
        @Override
        public int hashCode() {
            // Hash by endpoint type names: consistent with equals (equal edges share nodes,
            // hence share names), though distinct node pairs of the same types collide.
            return (this.from.toString() + "." + this.to.toString()).hashCode();
        }
    }
}
}
|
package nl.uva.cs.lobcder.webDav.resources;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.bradmcevoy.common.Path;
import com.bradmcevoy.http.Resource;
import com.bradmcevoy.http.ResourceFactory;
import java.io.IOException;
import java.net.URISyntaxException;
import nl.uva.vlet.exception.VlException;
import nl.uva.cs.lobcder.catalogue.IDRCatalogue;
import nl.uva.cs.lobcder.catalogue.SimpleDRCatalogue;
import nl.uva.cs.lobcder.resources.IDataResourceEntry;
import nl.uva.cs.lobcder.resources.DataResourceEntry;
import nl.uva.cs.lobcder.resources.ResourceFileEntry;
import nl.uva.cs.lobcder.resources.ResourceFolderEntry;
public class DataResourceFactory implements ResourceFactory {
private Logger log = LoggerFactory.getLogger(DataResourceFactory.class);
public static final String REALM = "vph-share";
private IDRCatalogue catalogue;
public DataResourceFactory() throws URISyntaxException, VlException, IOException {
catalogue = new SimpleDRCatalogue();
}
@Override
public Resource getResource(String host, String strPath) {
IDataResourceEntry rootEntry;
Collection<IDataResourceEntry> topLevelEntries;
try {
//Gets the root path. If instead we called :'ldri = Path.path(strPath);' we get back '/lobcder-1.0-SNAPSHOT'
Path ldri = Path.path(strPath).getStripFirst();
debug("
if (ldri.isRoot() || ldri.toString().equals("")) {
rootEntry = catalogue.getResourceEntryByLDRI(ldri);
if (rootEntry == null) {
rootEntry = new DataResourceEntry(ldri);
topLevelEntries = catalogue.getTopLevelResourceEntries();
for (IDataResourceEntry e : topLevelEntries) {
debug("Root elements: "+e.getUID()+" "+e.getLDRI());
rootEntry.addChild(e.getLDRI());
}
}
return new DataDirResource(new ResourceFolderEntry(Path.path("/hello")));
}
return getResource(ldri);
} catch (IOException ex) {
java.util.logging.Logger.getLogger(DataResourceFactory.class.getName()).log(Level.SEVERE, null, ex);
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(DataResourceFactory.class.getName()).log(Level.SEVERE, null, ex);
} catch (Exception ex) {
java.util.logging.Logger.getLogger(DataResourceFactory.class.getName()).log(Level.SEVERE, null, ex);
}
return null;
}
private void debug(String msg) {
System.err.println(this.getClass().getSimpleName() + ": " + msg);
log.debug(msg);
}
private Resource getResource(Path path) throws Exception {
DataResourceEntry entry = (DataResourceEntry) catalogue.getResourceEntryByLDRI(path);
if (entry == null) {
throw new NullPointerException("Path " + path + " doesn't exist");
}
if (entry instanceof ResourceFolderEntry) {
return new DataDirResource(entry);
}
if (entry instanceof ResourceFileEntry) {
return new DataFileResource(entry);
}
debug("Unknown Type: " + entry.getLDRI());
return new DataResource(entry);
}
}
|
package de.qaware.chronix.client.benchmark.benchmarkrunner.util;
import de.qaware.chronix.shared.QueryUtil.JsonTimeSeriesHandler;
import de.qaware.chronix.database.TimeSeriesMetaData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
/**
 * Singleton helper that reads previously imported time-series meta data from
 * the JSON record directory and builds randomized meta-data lists for
 * benchmark/cache tests.
 */
public class TimeSeriesCounter {

    private static TimeSeriesCounter instance;

    private final Logger logger = LoggerFactory.getLogger(TimeSeriesCounter.class);

    private JsonTimeSeriesHandler jsonTimeSeriesHandler;

    private TimeSeriesCounter() {
        jsonTimeSeriesHandler = JsonTimeSeriesHandler.getInstance();
    }

    /**
     * Returns the singleton instance, creating it lazily on first use.
     * FIX: synchronized — the previous unsynchronized check-then-act could
     * create two instances under concurrent first calls.
     */
    public static synchronized TimeSeriesCounter getInstance() {
        if (instance == null) {
            instance = new TimeSeriesCounter();
        }
        return instance;
    }

    /**
     * Returns meta data for all previously imported time series.
     *
     * @return list of all imported meta data. Empty if nothing was imported previously.
     */
    public List<TimeSeriesMetaData> getAll() {
        List<TimeSeriesMetaData> metaDataList = new ArrayList<>();
        File directory = new File(jsonTimeSeriesHandler.getTimeSeriesMetaDataRecordDirectoryPath());
        if (directory.exists() && directory.isDirectory()) {
            File[] measurements = directory.listFiles();
            if (measurements != null && measurements.length > 0) {
                for (File measurement : measurements) {
                    // One ".json" record per measurement; the handler is keyed by
                    // the bare measurement name, so the extension is stripped.
                    if (measurement.isFile() && measurement.getName().endsWith(".json")) {
                        metaDataList.addAll(jsonTimeSeriesHandler.readTimeSeriesMetaDatafromJson(
                                measurement.getName().replace(".json", "")));
                    }
                }
            } else {
                logger.error("TimeSeriesCounter: No meta data to read.");
            }
        }
        return metaDataList;
    }

    /**
     * Returns meta data for a random time series size times in a list. (for cache testing)
     *
     * @param size number of how many elements the list should have.
     * @return list of meta data.
     */
    public List<TimeSeriesMetaData> getCachingTestMetaData(int size) {
        return this.getCachingTestMetaData(this.getAll(), size);
    }

    /**
     * Returns meta data for a random time series size times in a list from given meta data.
     * (for cache testing)
     *
     * @param timeSeriesMetaDataList meta data from which a random time series should be chosen.
     * @param size number of how many elements the list should have.
     * @return list of meta data.
     */
    public List<TimeSeriesMetaData> getCachingTestMetaData(List<TimeSeriesMetaData> timeSeriesMetaDataList, int size) {
        // Negative sizes are treated as their magnitude (as before; note that
        // Integer.MIN_VALUE remains negative and the ArrayList ctor will throw).
        size = Math.abs(size);
        List<TimeSeriesMetaData> cachingTestMetaData = new ArrayList<>(size);
        if (!timeSeriesMetaDataList.isEmpty()) {
            Random random = new Random();
            // One random series, repeated 'size' times, to exercise caching.
            TimeSeriesMetaData metaData =
                    timeSeriesMetaDataList.get(random.nextInt(timeSeriesMetaDataList.size()));
            for (int i = 0; i < size; i++) {
                cachingTestMetaData.add(metaData);
            }
        }
        return cachingTestMetaData;
    }

    /**
     * Returns meta data for random time series of all previously imported time series.
     *
     * @param size number of how many elements the list should have.
     * @return list of meta data.
     */
    public List<TimeSeriesMetaData> getRandomTimeSeriesMetaData(int size) {
        return this.getRandomTimeSeriesMetaData(this.getAll(), size);
    }

    /**
     * Returns meta data for random time series from given meta data. Each entry
     * is a copy of a randomly chosen series with a random [start, end] sub-range
     * inside the original range.
     *
     * @param timeSeriesMetaDataList meta data from which the random time series should be chosen.
     * @param size number of how many elements the list should have.
     * @return list of meta data.
     */
    public List<TimeSeriesMetaData> getRandomTimeSeriesMetaData(List<TimeSeriesMetaData> timeSeriesMetaDataList, int size) {
        // Same magnitude handling as getCachingTestMetaData above.
        size = Math.abs(size);
        List<TimeSeriesMetaData> metaDataList = new ArrayList<>(size);
        if (!timeSeriesMetaDataList.isEmpty()) {
            Random random = new Random();
            for (int i = 0; i < size; i++) {
                TimeSeriesMetaData randomMetaData = new TimeSeriesMetaData(
                        timeSeriesMetaDataList.get(random.nextInt(timeSeriesMetaDataList.size())));
                if (randomMetaData.getStart() < randomMetaData.getEnd()) {
                    // randomStart is uniform in [start, end); randomEnd is uniform
                    // in [randomStart, end] (bound is exclusive, hence end + 1).
                    long randomStart = random.longs(randomMetaData.getStart(),
                            randomMetaData.getEnd()).iterator().next();
                    long randomEnd = random.longs(randomStart,
                            randomMetaData.getEnd() + 1L).iterator().next();
                    randomMetaData.setStart(randomStart);
                    randomMetaData.setEnd(randomEnd);
                    metaDataList.add(randomMetaData);
                } else {
                    // Time series only contains one point, no random start/end needed.
                    metaDataList.add(randomMetaData);
                }
            }
        }
        return metaDataList;
    }
}
|
package edu.duke.cabig.c3pr.web.registration.tabs;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.log4j.Logger;
import org.springframework.validation.Errors;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.util.WebUtils;
import edu.duke.cabig.c3pr.dao.ParticipantDao;
import edu.duke.cabig.c3pr.dao.StudyDao;
import edu.duke.cabig.c3pr.dao.StudySiteStudyVersionDao;
import edu.duke.cabig.c3pr.domain.Epoch;
import edu.duke.cabig.c3pr.domain.Participant;
import edu.duke.cabig.c3pr.domain.ScheduledEpoch;
import edu.duke.cabig.c3pr.domain.Study;
import edu.duke.cabig.c3pr.domain.StudySiteStudyVersion;
import edu.duke.cabig.c3pr.domain.StudySubject;
import edu.duke.cabig.c3pr.utils.Lov;
import edu.duke.cabig.c3pr.utils.StringUtils;
import edu.duke.cabig.c3pr.utils.web.spring.tabbedflow.AjaxableUtils;
import edu.duke.cabig.c3pr.web.registration.StudySubjectWrapper;
/**
 * Registration-flow tab for selecting the subject and study. Populates the
 * search page's reference data, guards against duplicate registrations and
 * attaches the selected epoch to the study subject.
 */
public class SearchStudySubjectTab extends RegistrationTab<StudySubjectWrapper> {

    private static final Logger logger = Logger.getLogger(SearchStudySubjectTab.class);

    private StudyDao studyDao;

    private ParticipantDao participantDao;

    public void setParticipantDao(ParticipantDao participantDao) {
        this.participantDao = participantDao;
    }

    private StudySiteStudyVersionDao studySiteStudyVersionDao;

    public void setStudySiteStudyVersionDao(
            StudySiteStudyVersionDao studySiteStudyVersionDao) {
        this.studySiteStudyVersionDao = studySiteStudyVersionDao;
    }

    public void setStudyDao(StudyDao studyDao) {
        this.studyDao = studyDao;
    }

    public SearchStudySubjectTab() {
        super("Subject & Study", "Subject & Study",
                "registration/select_study_or_subject");
        setShowSummary("false");
    }

    /**
     * Builds the reference data for the search page: LOV lists from
     * configuration plus navigation state carried over from the
     * registration-confirmation, study-registrations and edit-participant
     * flows (request parameters and one-shot session attributes, which are
     * removed after being copied into the model).
     */
    @Override
    public Map referenceData(HttpServletRequest request,
                             StudySubjectWrapper command) {
        Map refdata = super.referenceData(request, command);
        Map<String, List<Lov>> configMap = configurationProperty.getMap();
        refdata.put("identifiersTypeRefData", configMap.get("participantIdentifiersType"));
        refdata.put("searchTypeRefDataPrt", configMap.get("participantSearchType"));
        refdata.put("searchTypeRefDataStudy", configMap.get("studySearchTypeForRegistration"));
        refdata.put("administrativeGenderCode", configMap.get("administrativeGenderCode"));
        refdata.put("ethnicGroupCode", configMap.get("ethnicGroupCode"));
        refdata.put("raceCode", configMap.get("raceCode"));
        // NOTE(review): duplicate of the identifiersTypeRefData put above;
        // harmless (same value), kept for parity with the original.
        refdata.put("identifiersTypeRefData", configMap.get("participantIdentifiersType"));
        if (command.getStudySubject().getSystemAssignedIdentifiers() != null
                && command.getStudySubject().getSystemAssignedIdentifiers().size() > 0) {
            // FIX: Boolean.TRUE instead of deprecated 'new Boolean(true)'.
            refdata.put("disableForm", Boolean.TRUE);
        }
        refdata.put("mandatory", "true");
        if (request.getParameter("from_reg_confirmation") != null
                && request.getParameter("from_reg_confirmation").equals("true")) {
            Study study = null;
            refdata.put("from_reg_confirmation", request.getParameter("from_reg_confirmation"));
            if (request.getParameter("create_studyId") != null) {
                study = studyDao.getById(Integer.parseInt(request.getParameter("create_studyId")));
                refdata.put("create_studyId", request.getParameter("create_studyId"));
                refdata.put("create_study_name", study.getShortTitleText());
                refdata.put("create_study_identifier", study.getPrimaryIdentifier());
            }
            if (request.getParameter("create_studySiteName") != null) {
                refdata.put("create_studySiteName", request.getParameter("create_studySiteName"));
            }
            if (request.getParameter("create_studySiteStudyVersionId") != null) {
                refdata.put("create_studySiteStudyVersionId",
                        request.getParameter("create_studySiteStudyVersionId"));
            }
        }
        if (request.getParameter("fromStudyRegistrations") != null
                && request.getParameter("fromStudyRegistrations").equals("true")) {
            refdata.put("fromStudyRegistrations", request.getParameter("fromStudyRegistrations"));
            if (request.getParameter("createRegistration_studyId") != null) {
                refdata.put("createRegistration_studyId",
                        request.getParameter("createRegistration_studyId"));
            }
        }
        if (request.getParameter("fromEditParticipant") != null
                && request.getParameter("fromEditParticipant").equals("true")) {
            if (request.getSession().getAttribute("fromCreateRegistration") != null) {
                refdata.put("fromUpdateParticipant",
                        request.getSession().getAttribute("fromCreateRegistration"));
                request.getSession().removeAttribute("fromCreateRegistration");
            }
            if (request.getSession().getAttribute("studySiteStudyVersionIdFromCreateReg") != null
                    && !request.getSession().getAttribute("studySiteStudyVersionIdFromCreateReg").equals("")) {
                // FIX: local variable renamed to lowerCamelCase (was StudySiteStudyVersionId).
                String studySiteStudyVersionId = (String) request.getSession()
                        .getAttribute("studySiteStudyVersionIdFromCreateReg");
                StudySiteStudyVersion studySiteStudyVersion =
                        studySiteStudyVersionDao.getById(Integer.parseInt(studySiteStudyVersionId));
                refdata.put("studyName", studySiteStudyVersion.getStudyVersion().getShortTitleText());
                refdata.put("siteName", studySiteStudyVersion.getStudySite().getHealthcareSite().getName());
                refdata.put("studyPrimaryIdentifier",
                        studySiteStudyVersion.getStudyVersion().getStudy().getPrimaryIdentifier());
                refdata.put("studySiteStudyVersionIdFromUpdateParticipant", studySiteStudyVersionId);
                request.getSession().removeAttribute("studySiteStudyVersionIdFromCreateReg");
            } else if (request.getSession().getAttribute("searchedForStudy") != null
                    && request.getSession().getAttribute("searchedForStudy").equals("true")) {
                refdata.put("searchedForStudy", request.getSession().getAttribute("searchedForStudy"));
                request.getSession().removeAttribute("searchedForStudy");
                refdata.put("studySearchType", request.getSession().getAttribute("studySearchType"));
                request.getSession().removeAttribute("studySearchType");
                refdata.put("studySearchText", request.getSession().getAttribute("studySearchText"));
                request.getSession().removeAttribute("studySearchText");
            }
            if (request.getParameter("participantId") != null) {
                refdata.put("participantId", request.getParameter("participantId"));
                Participant participant = participantDao.getById(
                        Integer.parseInt(request.getParameter("participantId")));
                // NOTE(review): the name parts are joined with an empty string —
                // probably a missing separator; left unchanged to preserve display.
                refdata.put("participantName", participant.getLastName() + "" + participant.getFirstName());
                refdata.put("participantIdentifier",
                        participant.getOrganizationAssignedIdentifiers().get(0).getType().getCode() + " - "
                                + participant.getOrganizationAssignedIdentifiers().get(0).getValue());
            }
        }
        return refdata;
    }

    /**
     * Finishes the tab submission: short-circuits on epoch-only AJAX submits
     * and on missing/duplicate registrations, otherwise resolves the selected
     * epoch, attaches a scheduled epoch to the study subject and completes
     * the command object.
     */
    @Override
    public void postProcess(HttpServletRequest request, StudySubjectWrapper command, Errors error) {
        command.setParticipant(command.getStudySubject().getParticipant());
        if (WebUtils.hasSubmitParameter(request, "epochId")) {
            return;
        }
        if (command.getStudySubject().getParticipant() == null
                || command.getStudySubject().getStudySite() == null) {
            request.setAttribute("alreadyRegistered", Boolean.TRUE);
            return;
        }
        List registrations = studySubjectDao.searchBySubjectAndStudyIdentifiers(
                command.getStudySubject().getParticipant().getPrimaryIdentifier(),
                command.getStudySubject().getStudySite().getStudy()
                        .getCoordinatingCenterAssignedIdentifier());
        if (registrations.size() > 0) {
            request.setAttribute("alreadyRegistered", Boolean.TRUE);
            return;
        }
        Integer id;
        try {
            id = Integer.parseInt(request.getParameter("epoch"));
        } catch (RuntimeException e) {
            // Missing or malformed epoch parameter: nothing more to do.
            return;
        }
        Epoch epoch = epochDao.getById(id);
        epochDao.initialize(epoch);
        ScheduledEpoch scheduledEpoch;
        if (epoch.getTreatmentIndicator()) {
            // getArms().size() presumably forces lazy-loading of the arms
            // collection while the persistence session is open — TODO confirm.
            epoch.getArms().size();
            scheduledEpoch = new ScheduledEpoch();
        } else {
            scheduledEpoch = new ScheduledEpoch();
        }
        scheduledEpoch.setEpoch(epoch);
        if (command.getStudySubject().getScheduledEpochs().size() == 0) {
            command.getStudySubject().getScheduledEpochs().add(0, scheduledEpoch);
        } else {
            command.getStudySubject().getScheduledEpochs().set(0, scheduledEpoch);
        }
        registrationControllerUtils.buildCommandObject(command.getStudySubject());
        registrationControllerUtils.addConsents(command.getStudySubject());
        studySiteDao.initialize(command.getStudySubject().getStudySite());
    }

    /**
     * Rejects the submission when an unsaved subject is already registered
     * on the selected study.
     */
    @Override
    public void validate(StudySubjectWrapper command, Errors errors) {
        super.validate(command, errors);
        // FIX: removed redundant cast — command is already a StudySubjectWrapper.
        StudySubject studySubject = command.getStudySubject();
        if (studySubject.getId() == null) {
            List<StudySubject> studySubjects = studySubjectRepository.findRegistrations(studySubject);
            if (studySubjects.size() > 0) {
                errors.reject("duplicateRegistration", "Subject already registered on this study");
            }
        }
    }

    /**
     * AJAX endpoint: reports whether the accrual ceiling of the epoch given
     * by the 'epochId' request parameter has been reached.
     */
    public ModelAndView checkEpochAccrualCeiling(HttpServletRequest request, Object commandObj,
                                                 Errors error) {
        Map<String, Boolean> map = new HashMap<String, Boolean>();
        int id = Integer.parseInt(request.getParameter("epochId"));
        // FIX: Boolean.valueOf instead of deprecated 'new Boolean(...)'.
        map.put("alertForCeiling",
                Boolean.valueOf(studySubjectRepository.isEpochAccrualCeilingReached(id)));
        return new ModelAndView(AjaxableUtils.getAjaxViewName(request), map);
    }
}
|
package scala;
// Java-side stub for Scala's Array type. The /** @meta ... */ comments appear
// to be machine-readable Scala signature metadata consumed by the compiler —
// do not edit or reformat them.
/** @meta class [?T] extends scala.AnyRef with java.lang.Cloneable with java.io.Serializable; */
public abstract class Array implements Cloneable, java.io.Serializable {

    /** @meta constr (scala.Int); */
    public Array() {}

    // Returns the underlying array value.
    /** @meta method []scala.Array[?T]; */
    public abstract Object value();

    // Returns the number of elements.
    /** @meta method []scala.Int; */
    public abstract int length();

    // Returns the element at index i.
    /** @meta method (scala.Int)?T; */
    public abstract Object apply(int i);

    // Replaces the element at index i with x.
    /** @meta method (scala.Int,?T)scala.Unit; */
    public abstract void update(int i, Object x);
}
|
package org.curriki.gwt.server;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.doc.XWikiAttachment;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.gwt.api.client.Document;
import com.xpn.xwiki.gwt.api.client.XWikiGWTException;
import com.xpn.xwiki.gwt.api.server.XWikiServiceImpl;
import com.xpn.xwiki.objects.BaseElement;
import com.xpn.xwiki.objects.BaseObject;
import com.xpn.xwiki.objects.ListProperty;
import com.xpn.xwiki.objects.PropertyInterface;
import com.xpn.xwiki.objects.classes.ListItem;
import com.xpn.xwiki.plugin.image.ImagePlugin;
import com.xpn.xwiki.plugin.lucene.LucenePlugin;
import com.xpn.xwiki.plugin.lucene.LucenePluginApi;
import com.xpn.xwiki.plugin.lucene.SearchResult;
import com.xpn.xwiki.plugin.lucene.SearchResults;
import com.xpn.xwiki.plugin.zipexplorer.ZipExplorerPlugin;
import com.xpn.xwiki.web.XWikiEngineContext;
import com.xpn.xwiki.web.XWikiMessageTool;
import com.xpn.xwiki.web.XWikiRequest;
import com.xpn.xwiki.web.XWikiResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.curriki.gwt.client.AssetDocument;
import org.curriki.gwt.client.Constants;
import org.curriki.gwt.client.CurrikiService;
import org.curriki.gwt.client.TreeListItem;
import org.curriki.gwt.client.widgets.browseasset.AssetItem;
import org.curriki.gwt.client.widgets.template.TemplateInfo;
import org.curriki.xwiki.plugin.asset.Asset;
import org.curriki.xwiki.plugin.mimetype.MimeType;
import org.curriki.xwiki.plugin.mimetype.MimeTypePlugin;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;
public class CurrikiServiceImpl extends XWikiServiceImpl implements CurrikiService {
private static final Log log = LogFactory.getLog(CurrikiServiceImpl.class);
public CurrikiServiceImpl() {
super();
}
/**
 * Creates a service bound to an explicit request/response/engine context,
 * delegating all initialization to {@code XWikiServiceImpl}.
 */
public CurrikiServiceImpl(XWikiRequest request, XWikiResponse response, XWikiEngineContext engine) {
    super(request, response, engine);
}
/**
 * Returns whether the default collection page of the given space exists and
 * actually contains a composite-asset object.
 *
 * @param space the collection space to inspect
 * @throws XWikiGWTException when the underlying wiki lookup fails
 */
public boolean isDefaultCollectionExists(String space) throws XWikiGWTException {
    return isCollectionExists(space, Constants.DEFAULT_COLLECTION_PAGE);
}
/**
 * Returns whether the page exists and carries at least one non-null
 * composite-asset object.
 */
private boolean isCollectionExists(String space, String pageName) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument doc = context.getWiki().getDocument(space, pageName, context);
        if (doc.isNew()) {
            return false;
        }
        if (doc.getObjectNumbers(Constants.COMPOSITEASSET_CLASS) == 0) {
            return false;
        }
        // Work around a bug XWIKI-1624: the object list may contain null
        // placeholders, so the document only really holds a composite asset
        // if at least one entry is non-null.
        // TODO: Remove the work-around once XWIKI-1624 is fixed
        for (Object subAsset : doc.getObjects(Constants.COMPOSITEASSET_CLASS)) {
            if (subAsset != null) {
                return true;
            }
        }
        return false;
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Creates the default collection page in the given space.
 *
 * @return true when the collection page was created successfully
 */
private boolean createDefaultCollection(String space) throws XWikiGWTException {
    return createCollection(space, Constants.DEFAULT_COLLECTION_PAGE);
}
/**
 * Creates a collection page with the localized "Untitled" title.
 *
 * @return true when the collection document could be created
 */
public boolean createCollection(String space, String pageName) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiMessageTool msg = (XWikiMessageTool) context.get("msg");
        return createCollection(space, pageName, msg.get("Untitled")) != null;
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Creates a collection document and returns its full page name, or null
 * when creation failed.
 */
public String createCollection(String space, String pageName, String pageTitle) throws XWikiGWTException {
    Document doc = createCollectionDocument(space, pageName, pageTitle);
    return (doc == null) ? null : doc.getFullName();
}
/**
 * Creates (or returns, if it already exists) a collection document.
 * Null arguments are defaulted: space from the current user, pageName from a
 * generated unique name, pageTitle from the localized "Untitled". Ensures the
 * root collection exists, creates the composite asset and initializes its
 * collection metadata.
 *
 * @return the GWT Document wrapper for the collection page
 * @throws XWikiGWTException when any underlying wiki operation fails
 */
public Document createCollectionDocument(String space, String pageName, String pageTitle) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        if (space == null){
            // Use default space for user
            space = "Coll_" + context.getUser().replaceFirst("XWiki.", "");
        }
        if (pageName == null){
            // Generate a random page name
            pageName = context.getWiki().getUniquePageName(space, context);
        }
        if (pageTitle == null){
            XWikiMessageTool msg = (XWikiMessageTool) context.get("msg");
            pageTitle = msg.get("Untitled");
        }
        // TODO: Why in createCollection we don't raise an error if the collection exist instead of returning it ?
        if (isCollectionExists(space, pageName)){
            // Already exists: wrap and return the existing document.
            try {
                XWikiDocument doc = context.getWiki().getDocument(space, pageName, context);
                return newDocument(new Document(), doc, true, false, true, false, context);
            } catch (Exception e) {
                // NOTE: this XWikiGWTException is re-caught and re-wrapped by the
                // outer catch below.
                throw getXWikiGWTException(e);
            }
        }
        if (!isRootCollectionExists(space, context)){
            createRootCollection(space, context);
        }
        /* CURRIKI-816 - No longer create DEFAULT collection when creating another collection
        if (!pageName.equals(Constants.DEFAULT_COLLECTION_PAGE)){
            if (!isDefaultCollectionExists(space)){
                createDefaultCollection(space);
            }
        }
        */
        Document doc;
        if (pageName.equals(Constants.DEFAULT_COLLECTION_PAGE)){
            // Just create the Default collection
            doc = createCompositeAsset(space, space+"."+Constants.ROOT_COLLECTION_PAGE, pageName, Constants.COMPOSITE_COLLECTION, -1);
        } else {
            // Non-default collections start life as a temp composite asset
            // attached to the root collection.
            doc = createTempCompositeAsset(space+"."+Constants.ROOT_COLLECTION_PAGE, Constants.COMPOSITE_COLLECTION);
        }
        // Apply rights/category/title metadata and persist.
        XWikiDocument xDoc = initCollectionSettings(doc.getFullName(), pageTitle, context);
        return newDocument(new Document(), xDoc, true, false, true, false, context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Initializes the asset object of a freshly created collection page: public
 * rights, collection category and title; for the default collection also a
 * derived title/description and an "Other" education level. Saves the
 * document before returning it.
 *
 * @param fullName  full page name of the collection document
 * @param pageTitle title to set on the asset object
 * @return the saved XWikiDocument
 */
private XWikiDocument initCollectionSettings(String fullName, String pageTitle, XWikiContext context) throws XWikiException {
    XWikiDocument doc = context.getWiki().getDocument(fullName, context);
    BaseObject obj = doc.getObject(Constants.ASSET_CLASS);
    obj.set(Constants.ASSET_RIGHTS_PROPERTY, Constants.RIGHT_PUBLIC, context);
    obj.set(Constants.ASSET_CATEGORY_PROPERTY, Constants.CATEGORY_COLLECTION, context);
    obj.set(Constants.ASSET_TITLE_PROPERTY, pageTitle, context);
    if (doc.getName().equals(Constants.DEFAULT_COLLECTION_PAGE)){
        // Default collection gets a localized "<user>'s <default collection>" title.
        String username = context.getUser();
        username = username.substring(username.indexOf(".")+1);
        pageTitle = username + "'s " + context.getMessageTool().get("default_collection");
        obj.setStringValue(Constants.ASSET_TITLE_PROPERTY, pageTitle);
        obj.setLargeStringValue(Constants.ASSET_DESCRIPTION_PROPERTY, pageTitle);
        // we select the "Other" the education level
        List eduList = new ArrayList();
        eduList.add("na");
        obj.setDBStringListValue(Constants.ASSET_EDUCATIONAL_LEVEL_PROPERTY, eduList);
    }
    // we select the root of the Master framework
    List fwList = new ArrayList();
    fwList.add("FW_masterFramework.WebHome");
    obj.setDBStringListValue(Constants.ASSET_FW_ITEMS_PROPERTY, fwList);
    context.getWiki().saveDocument(doc, context.getMessageTool().get("curriki.comment.initmetadatafornewcollection"), context);
    return doc;
}
/**
 * Refreshes derived metadata on an asset page: detects the asset category
 * from its attachment/external link, and derives a title from the file or
 * link name when none is set. Saves the document and returns its wrapper.
 *
 * @param fullName     full page name of the asset document
 * @param fromTemplate currently unused by this implementation — TODO confirm
 *                     whether callers rely on it
 */
public Document updateMetadata(String fullName, boolean fromTemplate) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument doc = context.getWiki().getDocument(fullName, context);
        BaseObject assetObj = doc.getObject(Constants.ASSET_CLASS);
        // Here we discover the type of file attached
        if (doc.getAttachmentList().size() > 0 || doc.getObject(Constants.EXTERNAL_ASSET_CLASS) != null){
            String category = discoverTechnicalMetadata(doc, context);
            assetObj.setStringValue(Constants.ASSET_CATEGORY_PROPERTY, category);
        }
        //if it's a file uploaded or a link we derive the title of the asset from the name of the file
        String title = assetObj.getStringValue(Constants.ASSET_TITLE_PROPERTY);
        if ((title==null)||(title.equals(""))) {
            title = discoverAssetTitle(doc);
            if (title != null){
                assetObj.setStringValue(Constants.ASSET_TITLE_PROPERTY, title);
            }
        }
        context.getWiki().saveDocument(doc, context.getMessageTool().get("curriki.comment.initmetadatafornewasset"), context);
        return newDocument(new Document(), doc, true, false, true, false, context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Derives a human-readable title for an asset: the first attachment's file
 * name, or the last path segment of the external link. The file extension is
 * stripped and underscores become spaces.
 * FIX: the old trailing-slash check ({@code lastIndexOf("/") == length - 1})
 * evaluated to true for an EMPTY link (-1 == -1) and then crashed in
 * {@code substring(0, -1)}; {@code endsWith} is safe for that case.
 *
 * @return the derived title, or null when the document has neither an
 *         attachment nor an external link
 */
private String discoverAssetTitle(XWikiDocument doc){
    String title = null;
    if (doc.getAttachmentList().size() > 0){
        title = ((XWikiAttachment) doc.getAttachmentList().get(0)).getFilename();
    }
    else if (doc.getObject(Constants.EXTERNAL_ASSET_CLASS) != null){
        title = doc.getObject(Constants.EXTERNAL_ASSET_CLASS)
                .getStringValue(Constants.EXTERNAL_ASSET_LINK_PROPERTY);
        // Drop a single trailing slash so the last path segment is non-empty.
        if (title.endsWith("/")){
            title = title.substring(0, title.length() - 1);
        }
        int slash = title.lastIndexOf('/');
        if (slash >= 0){
            title = title.substring(slash + 1);
        }
    }
    if (title == null)
        return null;
    // Strip the file extension, if any.
    int dot = title.lastIndexOf('.');
    if (dot >= 0)
        title = title.substring(0, dot);
    title = title.replace("_", " ");
    return title;
}
/**
 * Returns whether the space's root collection page exists and carries at
 * least one non-null composite-asset object.
 * NOTE(review): this duplicates the null-filtering logic of
 * isCollectionExists for the fixed ROOT_COLLECTION_PAGE — candidate for
 * consolidation.
 */
public boolean isRootCollectionExists(String space, XWikiContext context) throws XWikiGWTException {
    try {
        XWikiDocument doc = context.getWiki().getDocument(space, Constants.ROOT_COLLECTION_PAGE, context);
        if (doc.isNew())
            return false;
        if (doc.getObjectNumbers(Constants.COMPOSITEASSET_CLASS) == 0){
            return false;
        } else {
            // Work around a bug XWIKI-1624
            // TODO: Remove the work-around once XWIKI-1624 is fixed
            List subAssets = doc.getObjects(Constants.COMPOSITEASSET_CLASS);
            Iterator i = subAssets.iterator();
            int count = 0;
            // Count stops at the first non-null object — existence is all we need.
            while (i.hasNext() && count == 0){
                if (i.next() != null){
                    count++;
                }
            }
            if (count == 0){
                return false;
            }
        }
        return true;
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Creates the root collection page of a space: protects the space, attaches
 * a composite-asset object of type root-collection, sets creator/content/
 * parent and saves. Always returns true unless an exception is thrown.
 */
public boolean createRootCollection(String space, XWikiContext context) throws XWikiGWTException {
    try {
        protectSpace(space, context);
        XWikiDocument doc = context.getWiki().getDocument(space, Constants.ROOT_COLLECTION_PAGE, context);
        // We do not check the right when we create a collection we only check if the collection start by Coll_
        // and does not exist
        //if (!doc.isNew() || !space.startsWith("Coll_"))
        //    return false;
        BaseObject CompObj = doc.newObject(Constants.COMPOSITEASSET_CLASS, context);
        CompObj.set(Constants.COMPOSITEASSET_TYPE_PROPERTY, Constants.COMPOSITE_ROOT_COLLECTION, context);
        doc.setCreator(context.getUser());
        doc.setContent(Constants.COMPOSITE_ROOT_COLLECTION_CONTENT);
        // NOTE(review): the page parent is set to the USER name, not another
        // page — presumably intentional for navigation; confirm.
        doc.setParent(context.getUser());
        protectEditPage(doc, context);
        context.getWiki().saveDocument(doc, context.getMessageTool().get("curriki.comment.createrootcollection"), context);
        return true;
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Appends an asset to the space's default collection, creating that
 * collection first when it does not yet exist.
 */
private boolean addCompositeAssetToDefaultCollection(String assetPageName, String space, XWikiContext context) throws XWikiGWTException {
    if (!isDefaultCollectionExists(space)) {
        createDefaultCollection(space);
    }
    String defaultCollection = space + "." + Constants.DEFAULT_COLLECTION_PAGE;
    return insertSubAsset(defaultCollection, assetPageName, -1);
}
/**
 * Appends an asset at the end (position -1) of the given collection.
 */
public boolean addCompositeAssetToCollection(String assetPageName, String collectionName) throws XWikiGWTException {
    return insertSubAsset(collectionName, assetPageName, -1);
}
/**
 * Returns the collection tree of the current user. Ensures a usable state
 * first: the default collection is created when the root collection is
 * missing or (modulo the XWIKI-1624 null-placeholder bug) empty.
 */
public AssetItem getCollections() throws XWikiGWTException {
    // Get list of all collections for the user
    try {
        XWikiContext context = getXWikiContext();
        String user = context.getUser();
        String space = "Coll_"+user.replaceFirst("XWiki.", "");
        /* CURRIKI-816
         * No longer create DEFAULT collection automatically if any other collection exists
         * but create it if one does not exist
         */
        if (!isRootCollectionExists(space, context)){
            createDefaultCollection(space);
        } else {
            XWikiDocument doc = context.getWiki().getDocument(space+"."+Constants.ROOT_COLLECTION_PAGE, context);
            if (doc.getObjectNumbers(Constants.SUBASSET_CLASS) == 0){
                createDefaultCollection(space);
            } else {
                // Work around a bug XWIKI-1624
                // TODO: Remove the work-around once XWIKI-1624 is fixed
                List subAssets = doc.getObjects(Constants.SUBASSET_CLASS);
                Iterator i = subAssets.iterator();
                int count = 0;
                // Stop at the first real (non-null) sub-asset.
                while (i.hasNext() && count == 0){
                    if (i.next() != null){
                        count++;
                    }
                }
                if (count == 0){
                    createDefaultCollection(space);
                }
            }
        }
        return getCollectionTreeItem(space+"."+Constants.ROOT_COLLECTION_PAGE);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Creates a composite asset in the given space with no explicit parent
 * (the 5-arg overload then uses the space's default collection) appended
 * at position -1.
 */
public Document createCompositeAsset(String space) throws XWikiGWTException {
    return createCompositeAsset(space, null, -1);
}
/**
 * Creates a composite asset with a generated unique page name.
 *
 * @param space    space in which to create the asset
 * @param parent   full name of the parent composite asset; {@code null}
 *                 means the space's default collection
 * @param position insertion position within the parent (-1 presumably
 *                 appends at the end — confirm against insertSubAsset)
 * @return the Document of the newly created composite asset
 */
public Document createCompositeAsset(String space, String parent, long position) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        // Let the wiki pick a page name unique within the space.
        String pageName = context.getWiki().getUniquePageName(space, context);
        return createCompositeAsset(space, parent, pageName, position);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Creates a composite asset of the standard Curriki-document type under the
 * given parent.
 *
 * @param space    space in which to create the asset
 * @param parent   full name of the parent composite asset; {@code null}
 *                 means the space's default collection
 * @param pageName page name to use for the new asset
 * @param position insertion position within the parent
 * @return the Document of the newly created composite asset
 */
public Document createCompositeAsset(String space, String parent, String pageName, long position) throws XWikiGWTException {
    return createCompositeAsset(space, parent, pageName, Constants.COMPOSITE_CURRIKI_DOCUMENT, position);
}
/**
 * Throws when the given page carries no composite-asset object — i.e. it
 * asserts that the parent composite asset exists.
 * NOTE(review): the name reads ambiguously; the method FAILS when the asset
 * does not exist.
 *
 * @throws XWikiException when the composite-asset object is missing
 */
private void assertIfCompositeAssetDoesNotExist(String fullName, XWikiContext context) throws XWikiException {
    XWikiDocument doc = context.getWiki().getDocument(fullName, context);
    if (doc.getObject(Constants.COMPOSITEASSET_CLASS) == null)
        throw new XWikiException(XWikiException.MODULE_XWIKI_GWT_API, -1, "Parent composite asset does not exist");
}
/**
 * Creates a composite asset of the given type under a parent collection.
 * Ensures the root (and, if targeted, default) collection exists, verifies
 * the parent is a composite asset, creates and saves the page, then links
 * it into the parent.
 *
 * @param space              space in which to create the asset
 * @param parent             full name of the parent composite asset;
 *                           {@code null} means the space's default collection
 * @param pageName           page name for the new asset
 * @param compositeAssetType value for the composite-asset type property
 * @param position           insertion position within the parent
 * @return the Document of the newly created composite asset
 */
public Document createCompositeAsset(String space, String parent, String pageName, String compositeAssetType, long position) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        if (!isRootCollectionExists(space, context)){
            createRootCollection(space, context);
        }
        if (parent == null){
            parent = space + "." + Constants.DEFAULT_COLLECTION_PAGE;
        }
        // we create the default collection if the parent is the default one and does not already exist
        if (parent.endsWith("." + Constants.DEFAULT_COLLECTION_PAGE) && !isDefaultCollectionExists(space)){
            createDefaultCollection(space);
        }
        assertIfCompositeAssetDoesNotExist(parent, context);
        XWikiDocument doc = createSourceAsset(parent, space, pageName, context);//context.getWiki().getDocument(space + "." + pageName, context);
        //assertEditRight(doc, context);
        doc.setContent("#includeForm(\"XWiki.CompositeAssetTemplate\")");
        BaseObject CompObj = doc.newObject(Constants.COMPOSITEASSET_CLASS, context);
        CompObj.set(Constants.COMPOSITEASSET_TYPE_PROPERTY, compositeAssetType, context);
        if (compositeAssetType.equals(Constants.COMPOSITE_COLLECTION)){
            // Collections get edit protection; plain documents do not.
            protectEditPage(doc, context);
        }
        //doc.setCreator(context.getUser());
        //protectPage(doc, context);
        context.getWiki().saveDocument(doc, context.getMessageTool().get("curriki.comment.startcreatingcompositeasset"), context);
        insertSubAsset(parent, doc.getFullName(), position);
        return newDocument(new Document(), doc, true, false, true, false, context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Creates a temporary composite asset of the default Curriki document type.
 * Convenience overload delegating to
 * {@link #createTempCompositeAsset(String, String)}.
 *
 * @param parent full name of the parent composite asset
 * @return the Document of the newly created temporary composite asset
 * @throws XWikiGWTException wrapping any underlying error
 */
public Document createTempCompositeAsset(String parent) throws XWikiGWTException {
    return createTempCompositeAsset(parent, Constants.COMPOSITE_CURRIKI_DOCUMENT);
}
/**
 * Creates a composite asset of the given type in the temporary asset space.
 * The page is saved immediately and protected with the standard rights set
 * (see protectPage).
 *
 * @param parent full name of the parent composite asset (edit rights required)
 * @param compositeAssetType value stored in the composite-asset type property
 * @return the Document wrapper of the new temporary composite asset
 * @throws XWikiGWTException wrapping any underlying failure
 */
public Document createTempCompositeAsset(String parent, String compositeAssetType) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument tempDoc = createTempSourceAsset(parent, context);
        // Mark the temporary page as a composite asset of the requested type.
        BaseObject compositeObj = tempDoc.newObject(Constants.COMPOSITEASSET_CLASS, context);
        compositeObj.set(Constants.COMPOSITEASSET_TYPE_PROPERTY, compositeAssetType, context);
        tempDoc.setContent("#includeForm(\"XWiki.CompositeAssetTemplate\")");
        tempDoc.setCreator(context.getUser());
        protectPage(tempDoc, context);
        context.getWiki().saveDocument(tempDoc, context.getMessageTool().get("curriki.comment.startcreatingcompositeasset"), context);
        return newDocument(new Document(), tempDoc, true, false, true, false, context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Appends a direction block object to a composite asset, registers it as a
 * sub-asset at the given position and saves the composite.
 *
 * @param compositeAssetPage full name of the composite asset
 * @param position insertion position, or -1 to append at the end
 * @return true when the direction block was created and saved
 * @throws XWikiGWTException wrapping any underlying error
 */
private boolean insertDirectionBlock(String compositeAssetPage, long position) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument compositeAssetDoc = context.getWiki().getDocument(compositeAssetPage, context);
        BaseObject directionObj = compositeAssetDoc.newObject(Constants.DIRECTION_CLASS, context);
        addSubAsset(compositeAssetDoc, Constants.DIRECTION + directionObj.getNumber(), position, context);
        context.getWiki().saveDocument(compositeAssetDoc, context.getMessageTool().get("curriki.comment.adddirectionblock"), context);
        // Bug fix: the original unconditionally returned false even after the
        // block was created and saved. Failures already surface as
        // XWikiGWTException, so reaching this point means success.
        return true;
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Inserts an existing asset page as a sub-asset of a composite asset and
 * saves the composite with a localized comment describing the insertion.
 *
 * @param compositeAssetPage full name of the composite asset (edit rights required)
 * @param assetPageName full name of the asset page to insert
 * @param position insertion position, or -1 to append at the end
 * @return always true; failures are reported by throwing XWikiGWTException
 * @throws XWikiGWTException wrapping any underlying error, including an
 *         attempt to insert an ancestor (which would create a cycle)
 */
public boolean insertSubAsset(String compositeAssetPage, String assetPageName, long position) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument compositeAssetDoc = context.getWiki().getDocument(compositeAssetPage, context);
        // addSubAsset checks edit rights, rejects cycles and returns the
        // effective position (resolved when -1 was passed).
        position = addSubAsset(compositeAssetDoc, assetPageName, position, context);
        XWikiDocument assetDoc = context.getWiki().getDocument(assetPageName, context);
        // Localized save comment parameters: title, page name, category, position.
        List params = new ArrayList();
        params.add(assetDoc.getStringValue(Constants.ASSET_TITLE_PROPERTY));
        params.add(assetDoc.getFullName());
        params.add(assetDoc.getStringValue(Constants.ASSET_CATEGORY_PROPERTY));
        params.add("" + position);
        String comment = context.getMessageTool().get("curriki.comment.insertsubassetincompositeasset", params);
        context.getWiki().saveDocument(compositeAssetDoc, comment, context);
        return true;
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Adds a sub-asset entry (SubAssetClass object) to a composite asset document.
 * Does NOT save the document; callers save it with an appropriate comment.
 *
 * Rejects additions that would create a cycle (CURRIKI-314): starting from
 * the composite itself, it repeatedly queries for all composites that
 * reference the current set of pages as sub-assets (walking up the ancestor
 * chain); if the page being added ever appears in that set, the addition is
 * refused with a localized error.
 *
 * @param compositeAssetDoc the composite asset to modify (edit rights required)
 * @param assetPageName full name of the page to add as a sub-asset
 * @param position desired position; -1 means append after the last sub-asset
 * @param context the XWiki context
 * @return the effective position the sub-asset entry was stored at
 * @throws XWikiGWTException when the user lacks edit rights or the addition
 *         would be recursive
 * @throws XWikiException on storage errors
 */
private long addSubAsset(XWikiDocument compositeAssetDoc, String assetPageName, long position, XWikiContext context) throws XWikiException, XWikiGWTException {
    assertEditRight(compositeAssetDoc, context);
    // CURRIKI-314 -- Do not allow an ancestor to be added as a sub-asset
    XWikiMessageTool msg = (XWikiMessageTool) context.get("msg");
    boolean done = false;
    // Pages whose referencing composites (ancestors) are looked up next.
    List searchFor = new ArrayList();
    searchFor.add(compositeAssetDoc.getFullName());
    while (!done){
        String sql = null;
        // Build a quoted, comma-separated IN-list while checking for a cycle.
        for (int i=0;i<searchFor.size();i++) {
            String item = (String) searchFor.get(i);
            if (item.equals(assetPageName)){
                throw new XWikiGWTException(msg.get("addsubasset.recursive_add"), msg.get("addsubasset.recursive_add_message"), XWikiException.ERROR_XWIKI_CONTENT_LINK_INVALID_TARGET, XWikiException.MODULE_XWIKI_GWT_API);
            }
            if (sql != null){
                sql = sql + ", '" + item + "'";
            } else {
                sql = "'" + item + "'";
            }
        }
        // NOTE(review): page names are concatenated into the HQL fragment.
        // They come from stored wiki documents rather than raw user input,
        // but a parameterized query would be safer -- TODO confirm.
        sql = ", BaseObject as obj, StringProperty as prop where obj.name=doc.fullName and obj.className='XWiki.SubAssetClass' and prop.id.id = obj.id and prop.name='assetpage' and prop.value in (" + sql + ")";
        List list = context.getWiki().getStore().searchDocumentsNames(sql, context);
        if ((list==null)||(list.size()==0)){
            done = true;
        } else {
            searchFor = list;
        }
    }
    // Make room at the requested slot, or append at the end when position==-1.
    if (position!=-1)
        relocateAssets(compositeAssetDoc, position, 1, context);
    else
        position = getEndPosition(compositeAssetDoc, context);
    int index = compositeAssetDoc.createNewObject(Constants.SUBASSET_CLASS, context);
    BaseObject obj = compositeAssetDoc.getObject(Constants.SUBASSET_CLASS, index);
    obj.set(Constants.SUBASSET_ASSETPAGE_PROPERTY, assetPageName, context);
    obj.set(Constants.SUBASSET_ORDER_PROPERTY, new Long(position), context);
    return position;
}
/**
 * Creates a new (empty) source asset in the temporary asset space, inheriting
 * metadata from the given composite asset.
 *
 * @param compositeAssetPage full name of the parent composite asset
 * @return the Document wrapper of the new temporary asset
 * @throws XWikiGWTException wrapping any underlying error
 */
public Document createTempSourceAsset(String compositeAssetPage) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument assetDoc = createTempSourceAsset(compositeAssetPage, context);
        return newDocument(new Document(), assetDoc, true, false, true, false, context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Removes the sub-asset entry stored at the given position from a composite
 * asset, shifts the following entries down by one, and saves the composite.
 * Only the SubAssetClass entry is removed; the referenced page itself is kept.
 *
 * @param compositeAssetPage full name of the composite asset (edit rights required)
 * @param position order value of the entry to remove
 * @return the refreshed composite content, as returned by getCompositeAsset
 * @throws XWikiGWTException wrapping any underlying error
 */
public List removeSubAsset(String compositeAssetPage, long position) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument compositeAssetDoc = context.getWiki().getDocument(compositeAssetPage, context);
        assertEditRight(compositeAssetDoc, context);
        List objs = compositeAssetDoc.getObjects(Constants.SUBASSET_CLASS);
        Iterator it = objs.iterator();
        while(it.hasNext()){
            BaseObject obj = (BaseObject) it.next();
            if(obj == null)
                continue;
            long currPos = obj.getLongValue(Constants.SUBASSET_ORDER_PROPERTY);
            if (currPos == position){
                compositeAssetDoc.removeObject(obj);
                // Close the gap left by the removed entry.
                relocateAssets(compositeAssetDoc, position, -1, context);
                XWikiDocument subassetDoc = context.getWiki().getDocument(obj.getStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY), context);
                // Localized save comment parameters: title, page name, position.
                List params = new ArrayList();
                params.add(subassetDoc.getDisplayTitle(context));
                params.add(subassetDoc.getFullName());
                params.add("" + position);
                String comment = context.getMessageTool().get("curriki.comment.removesubassetincompositeasset", params);
                context.getWiki().saveDocument(compositeAssetDoc, comment, context);
                break;
            }
        }
        return getCompositeAsset(compositeAssetPage);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Loads an asset, rebuilds its XWiki.XWikiRights objects from its rights
 * property (see the private applyRightsPolicy overload) and saves it.
 *
 * @param assetName full name of the asset document
 * @throws XWikiGWTException wrapping any underlying error
 */
public void applyRightsPolicy(String assetName) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument assetDoc = context.getWiki().getDocument(assetName, context);
        applyRightsPolicy(assetDoc, context);
        context.getWiki().saveDocument(assetDoc, context.getMessageTool().get("curriki.comment.applyrights"), context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Rebuilds the XWiki.XWikiRights objects of an asset according to its rights
 * property. In all cases the admin group and the creator (or the current
 * user when no creator is recorded) get an edit-allow right. Then:
 * RIGHT_PUBLIC adds an edit-allow for all registered users; RIGHT_PROTECTED
 * adds nothing further; any other value adds a view-allow right for the
 * creator. Does NOT save the document.
 *
 * @param assetDoc asset document whose rights are rebuilt
 * @param context the XWiki context
 * @throws XWikiException on object-manipulation errors
 */
private void applyRightsPolicy(XWikiDocument assetDoc, XWikiContext context) throws XWikiException {
    // Start from a clean slate.
    assetDoc.removeObjects("XWiki.XWikiRights");
    BaseObject assetObj = assetDoc.getObject(Constants.ASSET_CLASS);
    String rights = assetObj.getStringValue(Constants.ASSET_RIGHTS_PROPERTY);
    // Admin group may always edit.
    BaseObject rightObj = assetDoc.newObject("XWiki.XWikiRights", context);
    rightObj.setStringValue("groups", "XWiki.XWikiAdminGroup");
    rightObj.setStringValue("levels", "edit");
    rightObj.setIntValue("allow", 1);
    // Creator (or current user when the creator field is empty) may edit.
    rightObj = assetDoc.newObject("XWiki.XWikiRights", context);
    rightObj.setStringValue("users", ("".equals(assetDoc.getCreator())) ? context.getUser() : assetDoc.getCreator());
    rightObj.setStringValue("levels", "edit");
    rightObj.setIntValue("allow", 1);
    if (rights != null && rights.equals(Constants.RIGHT_PUBLIC)) {
        // Public: every registered user may edit.
        rightObj = assetDoc.newObject("XWiki.XWikiRights", context);
        rightObj.setStringValue("groups", "XWiki.XWikiAllGroup");
        rightObj.setStringValue("levels", "edit");
        rightObj.setIntValue("allow", 1);
    }
    else if (rights != null && rights.equals(Constants.RIGHT_PROTECTED)) {
        // Protected: no additional rights objects beyond admin + creator edit.
    }
    else {
        // Any other (or missing) value: add a view-allow for the creator.
        rightObj = assetDoc.newObject("XWiki.XWikiRights", context);
        rightObj.setStringValue("users", ("".equals(assetDoc.getCreator())) ? context.getUser() : assetDoc.getCreator());
        rightObj.setStringValue("levels", "view");
        rightObj.setIntValue("allow", 1);
    }
}
/**
 * Replaces all rights objects on a document with the standard protection set:
 * edit-allow for the admin group, edit-allow for all registered users, and
 * edit-allow for the document creator (or the current user when no creator is
 * recorded). Does NOT save the document.
 *
 * @param assetDoc document to protect
 * @param context the XWiki context
 * @throws XWikiException on object-creation errors
 */
private void protectPage(XWikiDocument assetDoc, XWikiContext context) throws XWikiException {
    // Drop whatever rights objects were there before.
    assetDoc.removeObjects("XWiki.XWikiRights");
    String creator = assetDoc.getCreator();
    String editorUser = "".equals(creator) ? context.getUser() : creator;
    // Admin group may edit.
    BaseObject adminRights = assetDoc.newObject("XWiki.XWikiRights", context);
    adminRights.setStringValue("groups", "XWiki.XWikiAdminGroup");
    adminRights.setStringValue("levels", "edit");
    adminRights.setIntValue("allow", 1);
    // All registered users may edit.
    BaseObject allRights = assetDoc.newObject("XWiki.XWikiRights", context);
    allRights.setStringValue("groups", "XWiki.XWikiAllGroup");
    allRights.setStringValue("levels", "edit");
    allRights.setIntValue("allow", 1);
    // The creator (or current user) may edit.
    BaseObject creatorRights = assetDoc.newObject("XWiki.XWikiRights", context);
    creatorRights.setStringValue("users", editorUser);
    creatorRights.setStringValue("levels", "edit");
    creatorRights.setIntValue("allow", 1);
}
/**
 * Adds a single edit-allow rights object for the document creator (or the
 * current user when no creator is recorded). Existing rights objects are
 * kept; the document is not saved.
 *
 * @param doc document to add the right to
 * @param context the XWiki context
 * @throws XWikiException on object-creation errors
 */
private void protectEditPage(XWikiDocument doc, XWikiContext context) throws XWikiException {
    String creator = doc.getCreator();
    String editorUser = "".equals(creator) ? context.getUser() : creator;
    BaseObject rightsObj = doc.newObject("XWiki.XWikiRights", context);
    rightsObj.setStringValue("users", editorUser);
    rightsObj.setStringValue("levels", "edit");
    rightsObj.setIntValue("allow", 1);
}
/**
 * Rebuilds the global rights of a space's WebPreferences page: all registered
 * users plus the editor group get edit-allow, and the space owner gets
 * edit-allow. For spaces named "Coll_&lt;name&gt;" the owner is resolved to
 * the XWiki user or group "XWiki.&lt;name&gt;" when such a page exists;
 * otherwise the current user is treated as the owner. Saves WebPreferences.
 *
 * @param spaceName name of the space to protect
 * @param context the XWiki context
 * @throws XWikiException when a Coll_ owner page exists but is neither a
 *         user nor a group, or on storage errors
 */
private void protectSpace(String spaceName, XWikiContext context) throws XWikiException {
    String owner = context.getUser();
    boolean ownerIsUser = true;
    // Collection spaces encode their owner in the space name: Coll_<name>.
    if (spaceName.startsWith("Coll_")){
        String spaceOwner = "XWiki."+spaceName.replaceFirst("Coll_", "");
        if (context.getWiki().exists(spaceOwner, context)){
            XWikiDocument ownerDoc = context.getWiki().getDocument(spaceOwner, context);
            BaseObject userObj = ownerDoc.getObject("XWiki.XWikiUsers");
            if (userObj != null){
                // Owner page is a user profile.
                owner = spaceOwner;
                ownerIsUser = true;
            } else {
                BaseObject groupObj = ownerDoc.getObject("XWiki.XWikiGroups");
                if (groupObj != null){
                    // Owner page is a group.
                    owner = spaceOwner;
                    ownerIsUser = false;
                } else {
                    throw new XWikiException(XWikiException.MODULE_XWIKI_GWT_API, XWikiException.ERROR_XWIKI_DOES_NOT_EXIST, "Cannot set owner for "+spaceName+". No user or group exists.");
                }
            }
        }
    }
    XWikiDocument doc = context.getWiki().getDocument(spaceName, "WebPreferences", context);
    // Replace all existing global rights for the space.
    doc.removeObjects("XWiki.XWikiGlobalRights");
    BaseObject obj = doc.newObject("XWiki.XWikiGlobalRights", context);
    obj.setStringValue("groups", "XWiki.XWikiAllGroup, XWiki.EditorGroup");
    obj.setStringValue("levels", "edit");
    obj.setIntValue("allow", 1);
    // Owner right goes on the users or groups field depending on what it is.
    obj = doc.newObject("XWiki.XWikiGlobalRights", context);
    if (ownerIsUser){
        obj.setStringValue("users", owner);
    } else {
        obj.setStringValue("groups", owner);
    }
    obj.setStringValue("levels", "edit");
    obj.setIntValue("allow", 1);
    context.getWiki().saveDocument(doc, context.getMessageTool().get("curriki.comment.protectspace"), context);
}
/**
 * Creates a new source asset in the temporary asset space with a generated
 * page name, inheriting metadata from the given composite asset.
 *
 * @param compositeAssetPage full name of the parent composite asset
 * @param context the XWiki context
 * @return the newly created (and saved) asset document
 */
private XWikiDocument createTempSourceAsset(String compositeAssetPage, XWikiContext context) throws XWikiException, XWikiGWTException {
    return createSourceAsset(compositeAssetPage, Constants.TEMPORARY_ASSET_SPACE, null, context);
}
/**
 * Copies a single property value from one object to another by cloning it.
 * The destination's previous value for the key (if any) is overwritten.
 *
 * @param fromObj object to read the property from
 * @param destObj object to write the cloned property into
 * @param key property name
 * @return true when a value was copied, false when the source has no value
 * @throws XWikiException on property access errors
 */
private boolean copyProperty(BaseObject fromObj, BaseObject destObj, String key) throws XWikiException {
    PropertyInterface sourceProp = fromObj.get(key);
    if (sourceProp == null) {
        return false;
    }
    // Clone so the two objects never share a live property instance.
    PropertyInterface copiedProp = (PropertyInterface) ((BaseElement) sourceProp).clone();
    copiedProp.setObject(destObj);
    destObj.safeput(key, copiedProp);
    return true;
}
/**
 * Merges a property from one object into another. When the destination does
 * not yet have the property, it is cloned over (like copyProperty). When both
 * have it and it is a ListProperty, source values missing from the
 * destination list are appended. A non-list property already present on the
 * destination is left untouched.
 *
 * @param fromObj object to read the property from
 * @param destObj object to merge the property into
 * @param key property name
 * @return true when the destination was updated; false when the source lacks
 *         the property or a non-list property already existed
 * @throws XWikiException on property access errors
 */
private boolean mergeProperty(BaseObject fromObj, BaseObject destObj, String key) throws XWikiException {
    PropertyInterface prop = fromObj.get(key);
    if (prop == null)
        return false;
    PropertyInterface newProp = destObj.get(key);
    if (newProp==null) {
        // Destination has no value yet: plain clone-and-copy.
        newProp = (PropertyInterface) ((BaseElement)prop).clone();
        newProp.setObject(destObj);
        destObj.safeput(key, newProp);
        return true;
    } else {
        if (newProp instanceof ListProperty) {
            // Append source entries not already in the destination list.
            // Note: list1 is the destination's own list, mutated in place.
            List list1 = ((ListProperty)newProp).getList();
            List list2 = ((ListProperty)prop).getList();
            for(int i=0;i<list2.size();i++) {
                Object item = list2.get(i);
                if (!list1.contains(item))
                    list1.add(item);
            }
            ((ListProperty)newProp).setList(list1);
            newProp.setObject(destObj);
            destObj.safeput(key, newProp);
            return true;
        } else {
            return false;
        }
    }
}
/**
 * Creates and saves a new source asset page, inheriting selected metadata
 * (educational level, framework items, rights) from the parent composite
 * asset and a licence type from its licence object (defaulting otherwise).
 * The new page gets the asset template content, the current user as creator,
 * and the standard protection set (protectPage).
 *
 * @param compositeAssetPage full name of the parent composite asset
 *        (edit rights required)
 * @param space space in which the new page is created
 * @param pageName desired page name, or null to generate a unique one
 * @param context the XWiki context
 * @return the newly created and saved asset document
 * @throws XWikiGWTException when the user lacks edit rights on the parent
 * @throws XWikiException on storage errors
 */
private XWikiDocument createSourceAsset(String compositeAssetPage, String space, String pageName, XWikiContext context) throws XWikiException, XWikiGWTException {
    XWikiDocument compositeAssetDoc = context.getWiki().getDocument(compositeAssetPage, context);
    assertEditRight(compositeAssetDoc, context);
    if (pageName == null){
        pageName = context.getWiki().getUniquePageName(space, context);
    }
    XWikiDocument assetDoc = context.getWiki().getDocument(space, pageName, context);
    BaseObject newObjAsset = assetDoc.newObject(Constants.ASSET_CLASS, context);
    // the Root collection does not have an asset class
    if (compositeAssetDoc.getObject(Constants.ASSET_CLASS) != null) {
        // Clone the parent's object so copied properties are detached from it.
        BaseObject parentObjAsset = (BaseObject) compositeAssetDoc.getObject(Constants.ASSET_CLASS).clone();
        copyProperty(parentObjAsset, newObjAsset, Constants.ASSET_EDUCATIONAL_LEVEL_PROPERTY);
        copyProperty(parentObjAsset, newObjAsset, Constants.ASSET_FW_ITEMS_PROPERTY);
        copyProperty(parentObjAsset, newObjAsset, Constants.ASSET_RIGHTS_PROPERTY);
    }
    // let's make sure default value is not empty
    String rights = newObjAsset.getStringValue(Constants.ASSET_RIGHTS_PROPERTY);
    if ((rights==null)||(rights.equals("")))
        newObjAsset.setStringValue(Constants.ASSET_RIGHTS_PROPERTY, Constants.RIGHT_PUBLIC);
    BaseObject newLicenceObj = assetDoc.newObject(Constants.ASSET_LICENCE_CLASS, context);
    // the Root collection does not have an asset Licence class
    if (compositeAssetDoc.getObject(Constants.ASSET_LICENCE_CLASS) != null) {
        BaseObject parentLicenceObjAsset = compositeAssetDoc.getObject(Constants.ASSET_LICENCE_CLASS);
        copyProperty(parentLicenceObjAsset, newLicenceObj, Constants.ASSET_LICENCE_TYPE_PROPERTY);
    } else {
        newLicenceObj.setStringValue(Constants.ASSET_LICENCE_TYPE_PROPERTY, Constants.ASSET_LICENCE_TYPE_DEFAULT);
    }
    // User should be by default the pretty name of the user
    newLicenceObj.setStringValue(Constants.ASSET_LICENCE_RIGHT_HOLDER_PROPERTY, context.getWiki().getLocalUserName(context.getUser(), null, false, context));
    assetDoc.setCustomClass(Asset.class.getName());
    assetDoc.setContent("#includeForm(\"XWiki.AssetTemplate\")");
    assetDoc.setCreator(context.getUser());
    protectPage(assetDoc, context);
    context.getWiki().saveDocument(assetDoc, context.getMessageTool().get("curriki.comment.createnewsourceasset"), context);
    return assetDoc;
}
/**
 * Creates a source asset in the temporary space from a template, copying all
 * of the template's objects and merging metadata from the parent composite
 * asset (see the private createSourceAssetFromTemplate).
 *
 * @param templatePageName full name of the template asset to copy
 * @param compositeAssetPage full name of the parent composite asset
 * @param clearattachments when true, the template's attachments are dropped
 * @return the Document wrapper of the new asset in the temporary space
 * @throws XWikiGWTException wrapping any underlying error
 */
public Document createTempSourceAssetFromTemplate(String templatePageName, String compositeAssetPage, boolean clearattachments) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument assetDoc = createTempSourceAssetFromTemplate(templatePageName, compositeAssetPage, clearattachments, context);
        return newDocument(new Document(), assetDoc, true, false, true, false, context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Creates a template-based source asset in the temporary asset space with a
 * generated page name.
 *
 * @see #createSourceAssetFromTemplate(String, String, String, String, boolean, XWikiContext)
 */
private XWikiDocument createTempSourceAssetFromTemplate(String templatePageName, String compositeAssetPage, boolean clearattachments, XWikiContext context) throws XWikiException, XWikiGWTException {
    return createSourceAssetFromTemplate(templatePageName, compositeAssetPage, Constants.TEMPORARY_ASSET_SPACE, null, clearattachments, context);
}
/**
 * Creates and saves a new source asset as a copy of a template page, then
 * merges metadata from the parent composite asset: educational level and
 * framework items are list-merged, rights are copied, the licence type is
 * copied (or defaulted), and the rights holder becomes the current user
 * appended with the template's rights holder. Comments are stripped from the
 * copy, attachments optionally cleared, and the template's full name is
 * recorded in the tracking property.
 *
 * @param templatePageName full name of the template asset (view rights required)
 * @param compositeAssetPage full name of the parent composite asset
 *        (edit rights required)
 * @param space space in which the copy is created
 * @param pageName desired page name, or null to generate a unique one
 * @param clearattachments when true, the copied attachment list is emptied
 * @param context the XWiki context
 * @return the new asset document, or null when the template's licence
 *         forbids derivatives for this user
 * @throws XWikiGWTException on missing rights
 * @throws XWikiException on storage errors
 */
private XWikiDocument createSourceAssetFromTemplate(String templatePageName, String compositeAssetPage, String space, String pageName, boolean clearattachments, XWikiContext context) throws XWikiException, XWikiGWTException {
    XWikiDocument compositeAssetDoc = context.getWiki().getDocument(compositeAssetPage, context);
    assertEditRight(compositeAssetDoc, context);
    XWikiDocument templateAssetDoc = context.getWiki().getDocument(templatePageName, context);
    assertViewRight(templateAssetDoc, context);
    // Licence may forbid derivatives for non-creators.
    if (assertDuplicateRight(templateAssetDoc, context)==false)
        return null;
    if (pageName == null){
        pageName = context.getWiki().getUniquePageName(space, context);
    }
    XWikiDocument assetDoc = context.getWiki().getDocument(space, pageName, context);
    // Let's make a copy of the template
    assetDoc = templateAssetDoc.copyDocument(space + "." + pageName, context);
    // Remove comments from copied asset
    if (assetDoc.getObjects("XWiki.XWikiComments")!=null)
        assetDoc.getObjects("XWiki.XWikiComments").clear();
    // Let's remove all attachements
    if (clearattachments) {
        assetDoc.setAttachmentList(new ArrayList());
    }
    BaseObject newObjAsset = assetDoc.getObject(Constants.ASSET_CLASS);
    if (newObjAsset==null)
        newObjAsset = assetDoc.newObject(Constants.ASSET_CLASS, context);
    // the Root collection does not have an asset class
    if (compositeAssetDoc.getObject(Constants.ASSET_CLASS) != null) {
        // Clone so merged/copied properties are detached from the parent.
        BaseObject parentObjAsset = (BaseObject) compositeAssetDoc.getObject(Constants.ASSET_CLASS).clone();
        mergeProperty(parentObjAsset, newObjAsset, Constants.ASSET_EDUCATIONAL_LEVEL_PROPERTY);
        mergeProperty(parentObjAsset, newObjAsset, Constants.ASSET_FW_ITEMS_PROPERTY);
        copyProperty(parentObjAsset, newObjAsset, Constants.ASSET_RIGHTS_PROPERTY);
    }
    // let's make sure default value is not empty
    String rights = newObjAsset.getStringValue(Constants.ASSET_RIGHTS_PROPERTY);
    if ((rights==null)||(rights.equals("")))
        newObjAsset.setStringValue(Constants.ASSET_RIGHTS_PROPERTY, Constants.RIGHT_PUBLIC);
    BaseObject newLicenceObj = assetDoc.getObject(Constants.ASSET_LICENCE_CLASS);
    if (newLicenceObj==null)
        newLicenceObj = assetDoc.newObject(Constants.ASSET_LICENCE_CLASS, context);
    // the Root collection does not have an asset Licence class
    if (compositeAssetDoc.getObject(Constants.ASSET_LICENCE_CLASS) != null) {
        BaseObject parentLicenceObjAsset = compositeAssetDoc.getObject(Constants.ASSET_LICENCE_CLASS);
        copyProperty(parentLicenceObjAsset, newLicenceObj, Constants.ASSET_LICENCE_TYPE_PROPERTY);
    }
    else {
        newLicenceObj.setStringValue(Constants.ASSET_LICENCE_TYPE_PROPERTY, Constants.ASSET_LICENCE_TYPE_DEFAULT);
    }
    // User should be by default the pretty name of the user added with the current template rights holder
    String newRightsHolder = context.getWiki().getLocalUserName(context.getUser(), null, false, context);
    String templateRightsHolder = templateAssetDoc.getStringValue(Constants.ASSET_LICENCE_CLASS, Constants.ASSET_LICENCE_RIGHT_HOLDER_PROPERTY);
    if (!newRightsHolder.equals(templateRightsHolder))
        newRightsHolder += ", " + templateRightsHolder;
    newLicenceObj.setStringValue(Constants.ASSET_LICENCE_RIGHT_HOLDER_PROPERTY, newRightsHolder);
    // Keep the information allowing to track where that asset came from
    newObjAsset.setStringValue(Constants.ASSET_TRACKING_PROPERTY, templatePageName);
    assetDoc.setCreator(context.getUser());
    // Clear rights objects otherwise this will trigger a remove object although these have never been saved
    assetDoc.setObjects("XWiki.XWikiRights", new Vector());
    protectPage(assetDoc, context);
    context.getWiki().saveDocument(assetDoc, context.getMessageTool().get("curriki.comment.createnewsourceassetfromtemplate"), context);
    return assetDoc;
}
/**
 * Duplicates a template asset into the parent composite asset's space and
 * rewires the sub-asset entry at the given index to point at the copy.
 * The parent is saved with a localized comment on success.
 *
 * @param parentAsset full name of the composite asset referencing the template
 * @param templatePageName full name of the template asset to duplicate
 * @param index order value of the sub-asset entry expected to reference the
 *        template
 * @return the full name of the newly created copy
 * @throws XWikiGWTException wrapping any failure: licence forbids derivatives,
 *         or no matching sub-asset entry at the given index
 */
public String duplicateTemplateAsset(String parentAsset, String templatePageName, long index) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument compositeAssetDoc = context.getWiki().getDocument(parentAsset, context);
        XWikiDocument templateAssetDoc = context.getWiki().getDocument(templatePageName, context);
        String space = compositeAssetDoc.getSpace();
        // Let's choose a nice name for the page
        String prettyName = context.getWiki().clearName(templateAssetDoc.getStringValue(Constants.ASSET_TITLE_PROPERTY), true, true, context);
        XWikiDocument newAssetDoc = createSourceAssetFromTemplate(templatePageName, parentAsset, space, prettyName, false, context);
        if (newAssetDoc==null) {
            throw new XWikiException(XWikiException.MODULE_XWIKI_GWT_API, -1, "Asset does not allow derivatives");
        }
        // Point the existing sub-asset entry at the copy instead of the template.
        if (replaceSubAsset(compositeAssetDoc, templatePageName, newAssetDoc.getFullName(), index)==false) {
            throw new XWikiException(XWikiException.MODULE_XWIKI_GWT_API, -1, "Could not find page " + templatePageName + " at position " + index + " in composite asset " + parentAsset);
        } else {
            // Localized save comment parameters: index, new page name, pretty name.
            List params = new ArrayList();
            params.add("" + index);
            params.add(newAssetDoc.getFullName());
            params.add(prettyName);
            String comment = context.getMessageTool().get("curriki.comment.duplicatetemplatesourceasset", params);
            context.getWiki().saveDocument(compositeAssetDoc, comment, context);
            return newAssetDoc.getFullName();
        }
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
// Zip Assets
/**
 * Lists the entries of a zip attachment as TreeListItem elements, using the
 * zipexplorer plugin. View rights on the owning page are required.
 *
 * @param pageName full name of the page holding the zip attachment
 * @param fileName name of the zip attachment
 * @return a list of TreeListItem (possibly empty), one per zip entry
 * @throws XWikiGWTException wrapping any error, including a missing
 *         zipexplorer plugin
 */
public List getFileTreeList(String pageName, String fileName) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        ZipExplorerPlugin zipe = (ZipExplorerPlugin) context.getWiki().getPlugin("zipexplorer", context);
        if (zipe == null)
            throw new XWikiException(XWikiException.MODULE_XWIKI_GWT_API, -1, "Zip Explorer is not loaded");
        XWikiDocument doc = context.getWiki().getDocument(pageName, context);
        assertViewRight(doc, context);
        List zipEntries = zipe.getFileTreeList(new com.xpn.xwiki.api.Document(doc, context), fileName, context);
        List tree = new ArrayList();
        if (zipEntries == null || zipEntries.isEmpty())
            return tree;
        // Convert each plugin ListItem into the GWT-transportable TreeListItem.
        for (Iterator it = zipEntries.iterator(); it.hasNext();) {
            ListItem entry = (ListItem) it.next();
            tree.add(new TreeListItem(entry.getId(), entry.getValue(), entry.getParent()));
        }
        return tree;
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Rewires the sub-asset entry at the given position to reference a new page,
 * but only when it currently references the expected page. Does not save.
 *
 * @param compositeAssetDoc composite asset whose entry is updated
 * @param previousAssetPageName page the entry is expected to reference
 * @param newAssetPageName page the entry should reference afterwards
 * @return true when the entry was found and updated; false when there is no
 *         entry at that position or it references a different page
 */
private boolean replaceSubAsset(XWikiDocument compositeAssetDoc, String previousAssetPageName, String newAssetPageName, long position) throws XWikiGWTException {
    List subAssets = compositeAssetDoc.getObjects(Constants.SUBASSET_CLASS);
    if (subAssets == null)
        return false;
    for (Iterator it = subAssets.iterator(); it.hasNext();) {
        BaseObject entry = (BaseObject) it.next();
        if (entry == null)
            continue;
        if (entry.getLongValue(Constants.SUBASSET_ORDER_PROPERTY) != position)
            continue;
        // Found the slot: replace only when it still references the expected page.
        String currentAssetPageName = entry.getStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY);
        if (!currentAssetPageName.equals(previousAssetPageName))
            return false;
        entry.setStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY, newAssetPageName);
        return true;
    }
    return false;
}
/**
 * Checks whether the current user may duplicate the given template asset.
 * The creator may always duplicate; other users are blocked only when the
 * asset's licence type contains "NoDerivatives".
 *
 * @param templateAssetDoc the template asset to check
 * @param context the XWiki context
 * @return true when duplication is permitted
 */
private boolean assertDuplicateRight(XWikiDocument templateAssetDoc, XWikiContext context) throws XWikiGWTException {
    // The creator is never restricted by the licence.
    if (templateAssetDoc.getCreator().equals(context.getUser())) {
        return true;
    }
    BaseObject licenceObj = templateAssetDoc.getObject(Constants.ASSET_LICENCE_CLASS);
    if (licenceObj == null) {
        return true;
    }
    String licenceType = licenceObj.getStringValue(Constants.ASSET_LICENCE_TYPE_PROPERTY);
    return !licenceType.contains("NoDerivatives");
}
/**
 * Finalizes a metadata update on an asset by re-applying the rights policy
 * and saving the document.
 *
 * The original body was a byte-for-byte duplicate of
 * {@link #applyRightsPolicy(String)} (load, apply policy, save with the
 * "curriki.comment.applyrights" comment); it now delegates to it instead.
 *
 * @param assetPage full name of the asset document
 * @throws XWikiGWTException wrapping any underlying error
 */
public void finishUpdateMetaData(String assetPage) throws XWikiGWTException {
    applyRightsPolicy(assetPage);
}
/**
 * Finalizes a (typically temporary) asset: renames it into the target
 * collection's space under a pretty unique name derived from its title,
 * registers it as a sub-asset of the collection, applies the rights policy,
 * saves both documents, and queues the asset (and its attachments) for
 * Lucene indexing.
 *
 * @param assetPage full name of the asset to finalize (edit rights required)
 * @param compositeAssetPage full name of the target composite asset; the bare
 *        root-collection page name is resolved to the current user's
 *        collection space
 * @param position insertion position within the composite, or -1 to append
 * @return the Document wrapper of the renamed asset
 * @throws XWikiGWTException wrapping any underlying error
 */
public Document finalizeAssetCreation(String assetPage, String compositeAssetPage, long position) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        // A bare root-collection name targets the current user's own collection space.
        if (compositeAssetPage.equals(Constants.ROOT_COLLECTION_PAGE)){
            String space = "Coll_"+context.getUser().replaceFirst("XWiki.", "");
            compositeAssetPage = space+"."+Constants.ROOT_COLLECTION_PAGE;
        }
        XWikiDocument compositeAssetDoc = context.getWiki().getDocument(compositeAssetPage, context);
        assertEditRight(compositeAssetDoc, context);
        String space = compositeAssetDoc.getSpace();
        XWikiDocument assetDoc = context.getWiki().getDocument(assetPage, context);
        assertEditRight(assetDoc, context);
        // Let's choose a nice name for the page
        String prettyName = context.getWiki().clearName(assetDoc.getStringValue(Constants.ASSET_TITLE_PROPERTY), true, true, context);
        assetDoc.rename(space + "." + getUniquePageName(space, prettyName), new ArrayList(), context);
        position = addSubAsset(compositeAssetDoc, space + "." + assetDoc.getName(), position, context);
        // Localized save comment parameters: title, page name, category, position.
        List params = new ArrayList();
        params.add(assetDoc.getStringValue(Constants.ASSET_TITLE_PROPERTY));
        params.add(assetDoc.getFullName());
        params.add(assetDoc.getStringValue(Constants.ASSET_CATEGORY_PROPERTY));
        params.add("" + position);
        String comment = context.getMessageTool().get("curriki.comment.addingsubassettocompositeasset", params);
        context.getWiki().saveDocument(compositeAssetDoc, comment, context);
        applyRightsPolicy(assetDoc, context);
        params = new ArrayList();
        params.add(assetDoc.getStringValue(Constants.ASSET_CATEGORY_PROPERTY));
        context.getWiki().saveDocument(assetDoc, context.getMessageTool().get("curriki.comment.finishcreatingsubasset", params), context);
        LucenePlugin lucene = (LucenePlugin) context.getWiki().getPlugin("lucene", context);
        // Workaround to make sure assets are indexed.
        // Because the XWiki Attachment API does not call the lucene plugin
        if (lucene != null) {
            lucene.queueDocument(assetDoc, context);
            if (assetDoc.getAttachmentList() != null && assetDoc.getAttachmentList().size() > 0)
                lucene.queueAttachment(assetDoc, context);
        }
        return getDocument(space + "." + assetDoc.getName());
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Creates a text source asset of the given text type in the temporary asset
 * space, under the given composite asset, and saves it.
 *
 * @param compositeAssetPage full name of the parent composite asset
 * @param type value stored in the text-asset type property
 * @return the Document wrapper of the new asset
 * @throws XWikiGWTException wrapping any underlying error
 */
public Document createTextSourceAsset(String compositeAssetPage, long type) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument assetDoc = createTempSourceAsset(compositeAssetPage, context);
        // Consistency: use newObject() like the sibling factory methods instead
        // of the equivalent createNewObject()+getObject() pair.
        BaseObject obj = assetDoc.newObject(Constants.TEXTASSET_CLASS, context);
        obj.setLongValue(Constants.TEXTASSET_TYPE_PROPERTY, type);
        context.getWiki().saveDocument(assetDoc, context.getMessageTool().get("curriki.comment.createtextsourceasset"), context);
        return newDocument(new Document(), assetDoc, true, false, true, false, context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Creates an external-link asset in the temporary asset space, under the
 * given composite asset, and saves it.
 *
 * @param compositeAssetPage full name of the parent composite asset
 * @param link URL stored in the external-asset link property
 * @return the Document wrapper of the new asset
 * @throws XWikiGWTException wrapping any underlying error
 */
public Document createLinkAsset(String compositeAssetPage, String link) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument assetDoc = createTempSourceAsset(compositeAssetPage, context);
        // Consistency: use newObject() like the sibling factory methods instead
        // of the equivalent createNewObject()+getObject() pair.
        BaseObject obj = assetDoc.newObject(Constants.EXTERNAL_ASSET_CLASS, context);
        obj.setStringValue(Constants.EXTERNAL_ASSET_LINK_PROPERTY, link);
        context.getWiki().saveDocument(assetDoc, context.getMessageTool().get("curriki.comment.createlinksourceasset"), context);
        return newDocument(new Document(), assetDoc, true, false, true, false, context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
public void moveAsset(String assetName, String fromParent, long fromPosition, String toParent, long toPosition) throws XWikiGWTException {
if ((fromParent!=null) && fromParent.equals(toParent) && fromPosition < toPosition){
toPosition
}
if (removeSubAsset(fromParent, fromPosition) != null)
insertSubAsset(toParent, assetName, toPosition);
}
/**
 * Shifts the order value of every sub-asset at or after startPos by
 * {@code move} (+1 opens a gap for an insertion, -1 closes the gap after a
 * removal). Does NOT save the document.
 *
 * @param doc composite asset document whose sub-asset entries are shifted
 * @param startPos first order value affected (inclusive)
 * @param move signed offset applied to each affected order value
 * @param context the XWiki context
 * @throws XWikiException on property errors
 */
private void relocateAssets(XWikiDocument doc, long startPos, long move, XWikiContext context) throws XWikiException {
    List subassets = doc.getObjects(Constants.SUBASSET_CLASS);
    if (subassets == null)
        return ;
    Iterator it = subassets.iterator();
    while (it.hasNext()) {
        BaseObject obj = (BaseObject) it.next();
        if(obj == null)
            continue;
        long obj_pos = obj.getLongValue(Constants.SUBASSET_ORDER_PROPERTY);
        if (obj_pos >= startPos) {
            obj.set(Constants.SUBASSET_ORDER_PROPERTY, new Long(obj_pos + move), context);
        }
    }
    // Debug aid only: when WARN logging is on, sanity-check the ordering.
    if(log.isWarnEnabled())
        checkSubAssetPosition(doc);
}
/**
 * Computes the append position for a composite asset: one more than the
 * highest sub-asset order value found, or 0 when there are no sub-assets.
 *
 * @param doc composite asset document to inspect
 * @param context the XWiki context (unused, kept for signature stability)
 * @return the first free position at the end of the sub-asset list
 * @throws XWikiException on property errors
 */
private long getEndPosition(XWikiDocument doc, XWikiContext context) throws XWikiException {
    List subassets = doc.getObjects(Constants.SUBASSET_CLASS);
    long endPosition = 0;
    if (subassets != null) {
        for (Iterator it = subassets.iterator(); it.hasNext();) {
            BaseObject subasset = (BaseObject) it.next();
            if (subasset == null) {
                continue;
            }
            long order = subasset.getLongValue(Constants.SUBASSET_ORDER_PROPERTY);
            endPosition = Math.max(endPosition, order + 1);
        }
    }
    return endPosition;
}
/**
 * Debug-only consistency check: logs a warning for every sub-asset whose
 * order value is out of range, duplicated, or missing from the contiguous
 * sequence 0..n-1. Never modifies the document; only called when WARN
 * logging is enabled (see relocateAssets).
 *
 * @param doc composite asset document to check
 */
private void checkSubAssetPosition(XWikiDocument doc){
    List subassets = doc.getObjects(Constants.SUBASSET_CLASS);
    // Maps order value -> sub-asset object, to detect duplicates and holes.
    Map orderedList = new HashMap();
    Iterator it = subassets.iterator();
    while(it.hasNext()){
        BaseObject obj = (BaseObject) it.next();
        if(obj == null)
            continue;
        long pos = obj.getLongValue(Constants.SUBASSET_ORDER_PROPERTY);
        // Positions should stay within 0..size-1 for a contiguous list.
        if (pos >= subassets.size()){
            log.warn("asset " + obj.getStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY) + " position is not right.");
            continue;
        }
        // NOTE(review): the size() > pos guard can skip duplicate detection
        // while the map is still being filled -- TODO confirm intent.
        if(orderedList.size() > pos && orderedList.get(new Long(pos)) != null) {
            log.warn("we are trying to add " + obj.getStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY) +
            " but there is already an asset (" + ((BaseObject)orderedList.get(new Long(pos))).getStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY)+ ") at the position " + pos + ".");
            continue;
        }
        orderedList.put(new Long(pos), obj);
    }
    // Report holes in the 0..n-1 sequence.
    for(long i = 0; i < orderedList.size(); i++){
        if (orderedList.get(new Long(i)) == null){
            log.warn("there is no asset at the position " + i);
        }
    }
}
/**
 * Builds the full sub-asset tree rooted at the given page.
 *
 * @param rootAssetPage full name of the root asset page
 * @return the root AssetItem with its children populated recursively
 * @throws XWikiGWTException wrapping any underlying error
 */
public AssetItem getFullTreeItem(String rootAssetPage) throws XWikiGWTException {
    AssetItem root = new AssetItem(rootAssetPage, 0);
    try {
        return getFullTreeItem(root, getXWikiContext());
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
public AssetItem getFullTreeItem(AssetItem parent, XWikiContext context) throws XWikiException, XWikiGWTException {
XWikiDocument doc = context.getWiki().getDocument(parent.getAssetPage(), context);
if (doc.getObject(Constants.COMPOSITEASSET_CLASS) != null){
parent.setType(Constants.CATEGORY_COLLECTION);
parent.setProtected(true);
}
try {
assertViewRight(doc, context);
} catch (Exception e) {
parent.setText(context.getMessageTool().get("assetprivate"));
return parent;
}
BaseObject obj = doc.getObject(Constants.ASSET_CLASS);
String title = (obj==null) ? null : obj.getStringValue(Constants.ASSET_TITLE_PROPERTY);
if (title == null || title.length() == 0) {
title = context.getMessageTool().get("assetuntitled");
}
parent.setText(title);
List objs_old = doc.getObjects(Constants.SUBASSET_CLASS);
if (objs_old == null || objs_old.size() == 0)
return parent;
List objs = new ArrayList(objs_old);
List items = new ArrayList();
while(objs.size() > 0) {
BaseObject smallerObj = null;
long smaller_pos = -100;
Iterator it = objs.iterator();
while(it.hasNext()) {
BaseObject currObj = (BaseObject) it.next();
if(currObj == null)
continue;
long currPos = currObj.getLongValue("order");
if ((smaller_pos == -100) || (currPos < smaller_pos)) {
smaller_pos = currPos;
smallerObj = currObj;
}
}
if (smallerObj == null)
break;
objs.remove(smallerObj);
String assetPage = smallerObj.getStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY);
long index = smallerObj.getLongValue(Constants.SUBASSET_ORDER_PROPERTY);
AssetItem currItem = new AssetItem(assetPage, index);
items.add(currItem);
if (assetPage.equals(Constants.PAGE_BREAK)){
currItem.setText("
}
else
getFullTreeItem(currItem, context);
}
parent.setItems(items);
return parent;
}
/**
 * Derives the technical category of an asset from its content. With an
 * attachment, the first attachment's file extension is looked up via the
 * mimetype plugin (falling back to CATEGORY_UNKNOWN); without one, the
 * external-asset link is used (falling back to CATEGORY_LINK). For images,
 * width/height are additionally measured via the image plugin and stored on
 * a mime-specific object. The document is modified but NOT saved.
 *
 * @param doc asset document to inspect (edit rights required)
 * @param context the XWiki context
 * @return the detected category name, or null when a link asset has no URL
 * @throws XWikiGWTException wrapping any underlying error
 */
private String discoverTechnicalMetadata(XWikiDocument doc, XWikiContext context) throws XWikiGWTException {
    String category = null;
    try {
        assertEditRight(doc, context);
        String fileName = null;
        if (doc.getAttachmentList().size() > 0)
        {
            // Attachment-backed asset: categorize by the first attachment.
            XWikiAttachment attach = (XWikiAttachment) doc.getAttachmentList().get(0);
            fileName = attach.getFilename();
            category = Constants.CATEGORY_UNKNOWN;
        }
        else
        {
            // Link-backed asset: categorize by the link target's file part.
            // NOTE(review): obj is not null-checked; assumes every
            // attachment-less asset carries an external-asset object -- TODO confirm.
            BaseObject obj = doc.getObject(Constants.EXTERNAL_ASSET_CLASS);
            fileName = obj.getStringValue(Constants.EXTERNAL_ASSET_LINK_PROPERTY);
            category = Constants.CATEGORY_LINK;
            if (fileName == null)
                return null;
            try {
                URL url = new URL(fileName);
                fileName = url.getFile();
            } catch (MalformedURLException e) {
                // Unparseable link: keep the generic link category.
                return category;
            }
        }
        // Extension after the last dot, lowercased; null when there is no dot.
        String extension = (fileName.lastIndexOf(".") != -1 ? fileName.substring(fileName.lastIndexOf(".") + 1).toLowerCase(): null);
        MimeTypePlugin mimePlugin = (MimeTypePlugin) context.getWiki().getPlugin(MimeTypePlugin.PLUGIN_NAME, context);
        MimeType mime = (mimePlugin==null) ? null : mimePlugin.getCategoryByExtension(extension, context);
        if (mime != null)
            category = mime.getCategoryName();
        // Images additionally get measured dimensions stored on a
        // mime-specific object (only reachable when mime != null).
        if (category.equals(Constants.CATEGORY_IMAGE)){
            ImagePlugin imgPlugin = (ImagePlugin) context.getWiki().getPlugin(ImagePlugin.PLUGIN_NAME, context);
            BaseObject obj = doc.getObject(mime.getFullName());
            if (obj == null){
                doc.createNewObject(mime.getFullName(), context);
                obj = doc.getObject(mime.getFullName());
            }
            if (imgPlugin != null && doc.getAttachmentList().size() > 0) {
                XWikiAttachment att = (XWikiAttachment) doc.getAttachmentList().get(0);
                int height = imgPlugin.getHeight(att, context);
                int width = imgPlugin.getWidth(att, context);
                obj.setIntValue("height", height);
                obj.setIntValue("width", width);
            }
        }
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
    return category;
}
/**
 * Loads a composite asset and its ordered sub-assets.
 *
 * @param compositeAssetPage full name of the composite asset document
 * @return a two-element list: index 0 holds the composite {@code AssetDocument},
 *         index 1 holds a list of its sub-assets sorted by their order property;
 *         page breaks appear in that list as the {@code Constants.PAGE_BREAK} string,
 *         and non-viewable or corrupted sub-assets appear as restricted placeholder
 *         documents carrying only metadata the caller is allowed to see
 * @throws XWikiGWTException if the caller lacks view rights or any lookup fails
 */
public List getCompositeAsset(String compositeAssetPage) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument assetDoc = context.getWiki().getDocument(compositeAssetPage, context);
        assertViewRight(assetDoc, context);
        // Work on a clone so the cached document is never mutated.
        assetDoc = (XWikiDocument) assetDoc.clone();
        List objs = assetDoc.getObjects(Constants.SUBASSET_CLASS);
        List res = new ArrayList();
        AssetDocument compositeDoc = (AssetDocument) newCurrikiDocument(assetDoc, true, true, true, false, null, context);
        res.add(compositeDoc);
        List assets = new ArrayList();
        res.add(assets);
        if (objs != null) {
            while (objs.size() > 0) {
                // Selection-style scan: pick the sub-asset object with the smallest
                // order value, emit it, remove it, repeat.
                BaseObject smallerObj = null;
                long smaller_pos = -1;
                Iterator it = objs.iterator();
                while (it.hasNext()) {
                    BaseObject currObj = (BaseObject) it.next();
                    if (currObj == null) {
                        continue;
                    }
                    long currPos = currObj.getLongValue(Constants.SUBASSET_ORDER_PROPERTY);
                    if ((smaller_pos == -1) || (currPos < smaller_pos)) {
                        smaller_pos = currPos;
                        smallerObj = currObj;
                    }
                }
                if (smallerObj == null) {
                    break;
                }
                objs.remove(smallerObj);
                String assetPage = smallerObj.getStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY);
                if (!assetPage.equals(Constants.PAGE_BREAK)) {
                    Document doc = getDocument(assetPage, true, true, false, true);
                    if (doc != null) {
                        if (doc instanceof AssetDocument) {
                            AssetDocument doc2 = (AssetDocument) doc;
                            doc2.setParentEditable(compositeDoc.hasEditRight());
                            doc2.setParentCurrikiTemplate(compositeDoc.isCurrikiTemplate());
                            doc2.setParent(assetDoc.getFullName());
                            assets.add(doc2);
                        } else {
                            // Document is corrupted (not an asset). Expose only the
                            // metadata the caller may see; it stays viewable but not editable.
                            assets.add(newRestrictedAssetDocument(assetPage, assetDoc, compositeDoc, true, context));
                        }
                    } else {
                        // Document is not viewable by the current user. Expose only
                        // allowed metadata; neither viewable nor editable.
                        assets.add(newRestrictedAssetDocument(assetPage, assetDoc, compositeDoc, false, context));
                    }
                } else {
                    assets.add(Constants.PAGE_BREAK);
                }
            }
        }
        return res;
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}

/**
 * Builds a placeholder AssetDocument for a sub-asset the caller may not fully see
 * (corrupted or non-viewable), carrying only basic metadata copied from the
 * original document. Extracted from two previously duplicated blocks.
 *
 * @param assetPage full name of the sub-asset document
 * @param parentDoc the composite asset's underlying XWiki document
 * @param compositeDoc the already-built composite AssetDocument (supplies parent flags)
 * @param viewRight whether the placeholder should be marked viewable
 * @param context the current XWiki context
 * @return the restricted placeholder document
 * @throws Exception if the original document cannot be loaded (callers wrap this)
 */
private AssetDocument newRestrictedAssetDocument(String assetPage, XWikiDocument parentDoc,
        AssetDocument compositeDoc, boolean viewRight, XWikiContext context) throws Exception {
    XWikiDocument origdoc = context.getWiki().getDocument(assetPage, context);
    AssetDocument restricted = new AssetDocument();
    restricted.setFullName(origdoc.getFullName());
    restricted.setName(origdoc.getName());
    restricted.setWeb(origdoc.getSpace());
    restricted.setAuthor(origdoc.getAuthor());
    restricted.setCreator(origdoc.getCreator());
    restricted.setDate(origdoc.getDate().getTime());
    restricted.setCreationDate(origdoc.getCreationDate().getTime());
    restricted.setParent(parentDoc.getFullName());
    restricted.setEditRight(false);
    restricted.setViewRight(viewRight);
    restricted.setParentEditable(compositeDoc.hasEditRight());
    restricted.setParentCurrikiTemplate(compositeDoc.isCurrikiTemplate());
    return restricted;
}
/**
 * Builds a GWT-transferable asset document from the given XWiki document and
 * sets its parent reference.
 *
 * @param xdoc the underlying XWiki document (must carry an asset object)
 * @param parent full name of the parent document, may be null
 * @return the wrapped document, with its parent set
 * @throws XWikiGWTException if the wrapping fails
 */
protected Document newCurrikiDocument(XWikiDocument xdoc, boolean withObjects, boolean withViewDisplayers,
        boolean withEditDisplayers, boolean withRenderedContent, String parent, XWikiContext context) throws XWikiGWTException {
    final AssetDocument asset = (AssetDocument) newDocument(
            null, xdoc, withObjects, withViewDisplayers, withEditDisplayers, withRenderedContent, context);
    asset.setParent(parent);
    return asset;
}
/**
 * Wraps an XWiki document into a GWT-transferable Document, upgrading it to an
 * AssetDocument when the page carries an asset object, and copies licence and
 * composite flags onto it.
 *
 * @param doc an existing wrapper to reuse, or null to allocate one
 * @param xdoc the underlying XWiki document
 * @return the populated wrapper (an AssetDocument when xdoc is an asset)
 * @throws XWikiGWTException if wrapping fails
 */
protected Document newDocument(Document doc, XWikiDocument xdoc, boolean withObjects, boolean withViewDisplayers,
        boolean withEditDisplayers, boolean withRenderedContent, XWikiContext context) throws XWikiGWTException {
    // Temporarily make xdoc the context document; the previous one is always restored.
    XWikiDocument previousDoc = context.getDoc();
    try {
        context.setDoc(xdoc);
        if (xdoc.getObject(Constants.ASSET_CLASS) != null) {
            doc = new AssetDocument();
        } else if (doc == null) {
            doc = new Document();
        }
        super.newDocument(doc, xdoc, withObjects, withViewDisplayers, withEditDisplayers, withRenderedContent, context);
        BaseObject licenceObj = xdoc.getObject(Constants.ASSET_LICENCE_CLASS);
        if (licenceObj != null) {
            // NOTE(review): this cast assumes a licence object only appears on asset
            // pages (so doc is an AssetDocument) — confirm with the class definitions.
            AssetDocument asset = (AssetDocument) doc;
            String licence = licenceObj.getStringValue(Constants.ASSET_LICENCE_TYPE_PROPERTY);
            boolean licenceProtected = licence.contains("NoDerivatives");
            asset.setLicenceProtected(licenceProtected);
            // Owners may always duplicate their own assets; others only when derivatives are allowed.
            asset.setDuplicatable((context.getUser().equals(doc.getCreator())) || (!licenceProtected));
            if (doc.getWeb().startsWith("Templates_") || doc.getWeb().equals(Constants.TEMPLATES_SPACE)) {
                asset.setCurrikiTemplate(true);
            }
        }
        BaseObject compositeObj = xdoc.getObject(Constants.COMPOSITEASSET_CLASS);
        if (compositeObj != null) {
            ((AssetDocument) doc).setComposite(true);
        }
        return doc;
    } finally {
        context.setDoc(previousDoc);
    }
}
/* Lucene Searching */
/**
 * Runs a Lucene full-text search and wraps each hit as a transferable Document.
 *
 * @param terms the Lucene query string
 * @param start zero-based offset of the first hit to return
 * @param nb maximum number of hits to return
 * @return a list whose first element is the total hit count (Integer), followed
 *         by one Document per hit with creator and format fields populated
 * @throws XWikiGWTException if the search or document loading fails
 */
public List luceneSearch(String terms, int start, int nb) throws XWikiGWTException {
    List docs = new ArrayList();
    try {
        XWikiContext context = getXWikiContext();
        LucenePluginApi lucene = (LucenePluginApi) context.getWiki().getPluginApi("lucene", context);
        // Need to add sorting
        SearchResults search = lucene.getSearchResults(terms, "name", "default,en");
        List results = search.getResults(start, nb);
        // First element in list is the hit count
        docs.add(Integer.valueOf(search.getTotalHitcount()));
        for (Object resultObj : results) {
            SearchResult result = (SearchResult) resultObj;
            XWikiDocument xdoc = context.getWiki().getDocument(result.getWeb() + "." + result.getName(), context);
            Document gwtDoc = newDocument(new Document(), xdoc, true, true, false, false, context);
            // TODO: We really should create a sub-class of Document for this
            gwtDoc.setCreator(context.getWiki().getUserName(xdoc.getCreator(), null, false, context));
            BaseObject compositeObj = xdoc.getObject(Constants.COMPOSITEASSET_CLASS);
            if (compositeObj != null) {
                gwtDoc.setFormat("composite");
            } else {
                List attachments = xdoc.getAttachmentList();
                if (attachments.isEmpty()) {
                    // No attachment: either an external link asset or a plain text block.
                    gwtDoc.setFormat(xdoc.getObject(Constants.EXTERNAL_ASSET_CLASS) != null ? "WWW" : "block");
                } else {
                    // Format is the upper-cased extension of the first attachment.
                    XWikiAttachment attachment = (XWikiAttachment) attachments.get(0);
                    String attachmentName = attachment.getFilename();
                    int dot = attachmentName.lastIndexOf(".");
                    String extension = (dot != -1) ? attachmentName.substring(dot + 1).toUpperCase() : "Unknown";
                    gwtDoc.setFormat(extension);
                }
            }
            docs.add(gwtDoc);
        }
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
    return docs;
}
/**
 * Builds the collection tree rooted at the given page.
 *
 * @param rootAssetPage full name of the root collection document
 * @return the root tree item with children populated, or null when the page is
 *         not a composite asset
 * @throws XWikiGWTException if the tree cannot be built
 */
public AssetItem getCollectionTreeItem(String rootAssetPage) throws XWikiGWTException {
    final AssetItem root = new AssetItem(rootAssetPage, 0);
    try {
        return getCollectionTreeItem(root, getXWikiContext());
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Recursively builds the tree item for a composite (collection) asset: resolves
 * its display title, then visits every sub-asset in ascending order, adding
 * children only for sub-assets that are themselves composite.
 *
 * @param parent tree item whose assetPage identifies the document to expand;
 *               its text and children are filled in by this method
 * @param context the current XWiki context
 * @return the populated item, or null when the document is missing, not a
 *         composite asset, or an invalid composite
 * @throws XWikiGWTException if the document cannot be loaded
 */
public AssetItem getCollectionTreeItem(AssetItem parent, XWikiContext context) throws XWikiGWTException {
    XWikiDocument doc;
    try {
        doc = context.getWiki().getDocument(parent.getAssetPage(), context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
    if (doc == null){
        //return new AssetItem("ERROR_NO_SUCH_DOCUMENT_2-"+parent.getAssetPage(), -1);
        return null;
    }
    BaseObject composite = doc.getObject(Constants.COMPOSITEASSET_CLASS);
    if (composite == null){
        // This is not a composite asset, so it has no place in the collection tree.
        //return parent;
        return null;
    }
    BaseObject obj = doc.getObject(Constants.ASSET_CLASS);
    String title;
    if (obj == null){
        // The root collection does not have an ASSET_CLASS object
        if (composite.getStringValue(Constants.COMPOSITEASSET_TYPE_PROPERTY).equals(Constants.COMPOSITE_ROOT_COLLECTION)){
            XWikiMessageTool msg = (XWikiMessageTool) context.get("msg");
            title = msg.get("Root");
        } else {
            // This Composite Asset is invalid
            return null;
        }
    } else {
        // Title fallback chain: asset title -> document title -> full name -> "Untitled".
        title = obj.getStringValue(Constants.ASSET_TITLE_PROPERTY);
        if (title == null || title.length() == 0) {
            title = doc.getTitle();
            if (title == null || title.length() == 0){
                title = doc.getFullName();
                if (title == null || title.length() == 0){
                    XWikiMessageTool msg = (XWikiMessageTool) context.get("msg");
                    title = msg.get("Untitled");
                }
            }
        }
    }
    parent.setText(title);
    List objs_old = doc.getObjects(Constants.SUBASSET_CLASS);
    if (objs_old == null || objs_old.size() == 0){
        return parent;
    }
    // Copy so the selection loop below can remove entries without touching the document.
    List objs = new ArrayList(objs_old);
    List items = new ArrayList();
    while(objs.size() > 0) {
        // Selection-style scan: pick the sub-asset with the smallest order value.
        // NOTE(review): -100 is the "unset" sentinel; order values below -100 would
        // be handled, but an actual order of exactly -100 would restart the scan —
        // confirm order values are non-negative.
        BaseObject smallerObj = null;
        long smaller_pos = -100;
        Iterator it = objs.iterator();
        while(it.hasNext()) {
            BaseObject currObj = (BaseObject) it.next();
            if(currObj == null){
                continue;
            }
            // Consistency fix: previously read the bare literal "order" here while the
            // rest of this class uses Constants.SUBASSET_ORDER_PROPERTY for the same field.
            long currPos = currObj.getLongValue(Constants.SUBASSET_ORDER_PROPERTY);
            if ((smaller_pos == -100) || (currPos < smaller_pos)) {
                smaller_pos = currPos;
                smallerObj = currObj;
            }
        }
        if (smallerObj == null){
            break;
        }
        objs.remove(smallerObj);
        String assetPage = smallerObj.getStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY);
        long index = smallerObj.getLongValue(Constants.SUBASSET_ORDER_PROPERTY);
        if (assetPage == null || assetPage.length() == 0){
            continue;
        } else if (assetPage.equals(Constants.PAGE_BREAK)){
            // Page breaks never appear in the collection tree.
            continue;
        } else {
            AssetItem currItem = new AssetItem(assetPage, index);
            currItem = getCollectionTreeItem(currItem, context);
            if (currItem != null){
                items.add(currItem);
            }
        }
    }
    if (!items.isEmpty()){
        parent.setItems(items);
    }
    return parent;
}
// Templates
/**
 * Lists every template referenced from the template collection spaces
 * (Constants.TEMPLATES_SPACE and any space prefixed "Templates_").
 *
 * @return a list of TemplateInfo entries (page name, title, description, and the
 *         URL of the template's first attachment as a preview image, or "")
 * @throws XWikiGWTException if the search or any document lookup fails
 */
public List getTemplates() throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        String hsql = "where doc.name='WebHome' and (doc.web='" + Constants.TEMPLATES_SPACE + "' or doc.web like 'Templates_%') order by doc.fullName";
        List templateList = new ArrayList();
        List collectionList = context.getWiki().getStore().searchDocumentsNames(hsql, context);
        for (int i = 0; i < collectionList.size(); i++) {
            // Bug fix: this previously read collectionList.get(0) inside the loop,
            // so only the first collection's templates were ever returned (repeatedly).
            String docname = (String) collectionList.get(i);
            XWikiDocument doc = context.getWiki().getDocument(docname, context);
            Vector subassets = doc.getObjects(Constants.SUBASSET_CLASS);
            if (subassets != null) {
                for (int j = 0; j < subassets.size(); j++) {
                    BaseObject subassetobj = (BaseObject) subassets.get(j);
                    if (subassetobj != null) {
                        String templatePageName = subassetobj.getStringValue(Constants.SUBASSET_ASSETPAGE_PROPERTY);
                        XWikiDocument templateDoc = context.getWiki().getDocument(templatePageName, context);
                        String title = templateDoc.getStringValue(Constants.ASSET_CLASS, Constants.ASSET_TITLE_PROPERTY);
                        String desc = templateDoc.getStringValue(Constants.ASSET_CLASS, Constants.ASSET_DESCRIPTION_PROPERTY);
                        String imageURL = "";
                        // We look for the first attached file in the template collection
                        List attachmentList = templateDoc.getAttachmentList();
                        if ((attachmentList != null) && (attachmentList.size() > 0)) {
                            XWikiAttachment attachment = (XWikiAttachment) attachmentList.get(0);
                            if (attachment != null) {
                                imageURL = templateDoc.getAttachmentURL(attachment.getFilename(), context);
                            }
                        }
                        templateList.add(new TemplateInfo(templatePageName, title, desc, imageURL));
                    }
                }
            }
        }
        return templateList;
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
/**
 * Stores a Viditalk video id on the given asset, creating the Viditalk object
 * on first use, and saves the document.
 *
 * @param fullName full name of the asset document
 * @param videoId the Viditalk video identifier to store
 * @return the saved document wrapped for GWT transfer
 * @throws XWikiGWTException if loading or saving the document fails
 */
public Document updateViditalk(String fullName, String videoId) throws XWikiGWTException {
    try {
        XWikiContext context = getXWikiContext();
        XWikiDocument doc = context.getWiki().getDocument(fullName, context);
        // Cleanup: removed an unused local that fetched the ASSET_CLASS object.
        BaseObject videoObj = doc.getObject(Constants.VIDITALK_CLASS);
        if (videoObj == null) {
            // Create a Viditalk object on first use.
            doc.createNewObject(Constants.VIDITALK_CLASS, context);
            videoObj = doc.getObject(Constants.VIDITALK_CLASS);
        }
        videoObj.setStringValue("video_id", videoId);
        context.getWiki().saveDocument(doc, context.getMessageTool().get("curriki.comment.updatedviditalkid"), context);
        return newDocument(new Document(), doc, true, false, true, false, context);
    } catch (Exception e) {
        throw getXWikiGWTException(e);
    }
}
}
|
package com.googlecode.jslint4java.eclipse.builder;
import org.eclipse.core.resources.ICommand;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IProjectDescription;
import org.eclipse.core.resources.IProjectNature;
import org.eclipse.core.runtime.CoreException;
import com.googlecode.jslint4java.eclipse.Activator;
/**
 * Eclipse project nature that attaches the JSLint builder to a project.
 * Configuring the nature appends the builder to the project's build spec
 * (once); deconfiguring removes it again.
 */
public class JSLintNature implements IProjectNature {

    /**
     * ID of this project nature
     */
    public static final String NATURE_ID = Activator.PLUGIN_ID + ".jsLintNature";

    private IProject project;

    /** Adds the JSLint builder to the project's build spec unless it is already present. */
    public void configure() throws CoreException {
        IProjectDescription description = project.getDescription();
        ICommand[] commands = description.getBuildSpec();
        for (ICommand existing : commands) {
            if (existing.getBuilderName().equals(JSLintBuilder.BUILDER_ID)) {
                // Builder already registered; nothing to do.
                return;
            }
        }
        // Append a new command for the JSLint builder at the end of the spec.
        ICommand[] extended = new ICommand[commands.length + 1];
        System.arraycopy(commands, 0, extended, 0, commands.length);
        ICommand jsLintCommand = description.newCommand();
        jsLintCommand.setBuilderName(JSLintBuilder.BUILDER_ID);
        extended[extended.length - 1] = jsLintCommand;
        description.setBuildSpec(extended);
        project.setDescription(description, null);
    }

    /** Removes the first JSLint builder entry from the project's build spec, if any. */
    public void deconfigure() throws CoreException {
        IProjectDescription description = getProject().getDescription();
        ICommand[] commands = description.getBuildSpec();
        for (int index = 0; index < commands.length; index++) {
            if (!commands[index].getBuilderName().equals(JSLintBuilder.BUILDER_ID)) {
                continue;
            }
            // Rebuild the spec without this entry.
            ICommand[] remaining = new ICommand[commands.length - 1];
            System.arraycopy(commands, 0, remaining, 0, index);
            System.arraycopy(commands, index + 1, remaining, index, commands.length - index - 1);
            description.setBuildSpec(remaining);
            return;
        }
    }

    public IProject getProject() {
        return project;
    }

    public void setProject(IProject project) {
        this.project = project;
    }
}
|
package pl.exsio.ca.module.terrain.report.impl;
import com.vaadin.addon.charts.Chart;
import com.vaadin.addon.charts.model.ChartType;
import com.vaadin.addon.charts.model.Configuration;
import com.vaadin.addon.charts.model.ContainerDataSeries;
import com.vaadin.addon.charts.model.Cursor;
import com.vaadin.addon.charts.model.PlotOptionsPie;
import com.vaadin.addon.charts.model.Tooltip;
import com.vaadin.addon.charts.model.style.Color;
import com.vaadin.addon.charts.themes.VaadinTheme;
import com.vaadin.addon.jpacontainer.JPAContainer;
import com.vaadin.addon.jpacontainer.JPAContainerFactory;
import com.vaadin.addon.jpacontainer.fieldfactory.SingleSelectConverter;
import static com.vaadin.addon.jpacontainer.filter.Filters.eq;
import com.vaadin.data.Item;
import com.vaadin.data.Property;
import com.vaadin.data.util.IndexedContainer;
import com.vaadin.data.util.converter.Converter;
import com.vaadin.data.util.converter.StringToDateConverter;
import com.vaadin.data.util.filter.UnsupportedFilterException;
import com.vaadin.ui.AbstractSelect;
import com.vaadin.ui.Button;
import com.vaadin.ui.ComboBox;
import com.vaadin.ui.Table;
import com.vaadin.ui.VerticalLayout;
import java.text.DateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import pl.exsio.ca.model.ServiceGroup;
import pl.exsio.ca.model.Terrain;
import pl.exsio.ca.model.TerrainAssignment;
import pl.exsio.ca.model.TerrainNotification;
import pl.exsio.ca.model.TerrainType;
import pl.exsio.ca.model.dao.TerrainDao;
import pl.exsio.ca.model.entity.factory.CaEntityFactory;
import pl.exsio.ca.model.entity.provider.provider.CaEntityProviderProvider;
import pl.exsio.ca.model.repository.provider.CaRepositoryProvider;
import pl.exsio.ca.module.terrain.report.Report;
import static pl.exsio.jin.translationcontext.TranslationContext.t;
import pl.exsio.frameset.vaadin.ui.support.component.ComponentFactory;
import pl.exsio.jin.annotation.TranslationPrefix;
/**
*
* @author exsio
*/
@TranslationPrefix("ca.report.abstract")
public abstract class AbstractReportImpl extends VerticalLayout implements Report {
protected static final int MODE_WORK = 0;
protected static final int MODE_REST = 1;
protected static final String ACTIVE_BUTTON_STYLE = "button-selected";
protected Button rest = new Button(t("rest"));
;
protected Button report = new Button(t("report"));
;
protected int lastMode = MODE_WORK;
protected CaEntityFactory caEntities;
protected CaRepositoryProvider caRepositories;
protected CaEntityProviderProvider caEntityProviders;
public void setCaEntities(CaEntityFactory caEntities) {
this.caEntities = caEntities;
}
public void setCaRepositories(CaRepositoryProvider caRepositories) {
this.caRepositories = caRepositories;
}
public void setCaEntityProviders(CaEntityProviderProvider caEntityProviders) {
this.caEntityProviders = caEntityProviders;
}
protected void setWorkActive() {
report.setStyleName(ACTIVE_BUTTON_STYLE);
rest.removeStyleName(ACTIVE_BUTTON_STYLE);
}
protected void setReportActive() {
rest.setStyleName(ACTIVE_BUTTON_STYLE);
report.removeStyleName(ACTIVE_BUTTON_STYLE);
}
protected Set<Terrain> getAllTerrainsExcludingIds(Date date, ServiceGroup group, TerrainType type, Collection ids) {
TerrainDao dao = this.caRepositories.getTerrainRepository();
if (ids.isEmpty()) {
ids.add(-1l);
}
if (group == null && type == null) {
return dao.findByAssignmentDateExcludingIds(date, ids);
} else if (group != null && type == null) {
return dao.findByGroupAndAssignmentDateExcludingIds(group, date, ids);
} else if (group != null && type != null) {
return dao.findByTypeAndGroupAndAssignmentDateExcludingIds(type, group, date, ids);
} else if (group == null && type != null) {
return dao.findByTypeAndAssignmentDateExcludingIds(type, date, ids);
} else {
return null;
}
}
protected IndexedContainer getChartContainer(double allCount, double reportCount) throws Property.ReadOnlyException {
IndexedContainer container = new IndexedContainer();
Double reportPercent = allCount > 0 ? reportCount * 100 / allCount : 0;
Double restPercent = 100 - reportPercent;
Color[] colors = new VaadinTheme().getColors();
container.addContainerProperty("label", String.class, null);
container.addContainerProperty("percent", Number.class, null);
container.addContainerProperty("color", Color.class, null);
Item report = container.addItem("report");
report.getItemProperty("label").setValue(t("report") + " (" + new Double(reportCount).intValue() + "/" + new Double(allCount).intValue() + ")");
report.getItemProperty("percent").setValue(reportPercent);
report.getItemProperty("color").setValue(colors[4]);
Item rest = container.addItem("rest");
rest.getItemProperty("label").setValue(t("rest") + " (" + new Double(allCount - reportCount).intValue() + "/" + new Double(allCount).intValue() + ")");
rest.getItemProperty("percent").setValue(restPercent);
rest.getItemProperty("color").setValue(colors[3]);
container.sort(new Object[]{"terrain_no"}, new boolean[]{true});
return container;
}
protected IndexedContainer getReportRestContainer(Set<Terrain> restTerrains) throws Property.ReadOnlyException {
IndexedContainer container = prepareReportContainer();
for (Terrain terrain : restTerrains) {
Item item = container.addItem(terrain.getId());
item.getItemProperty("terrain_no").setValue(terrain.getNo());
item.getItemProperty("terrain_name").setValue(terrain.getName());
item.getItemProperty("terrain_type").setValue(terrain.getType());
item.getItemProperty("notification_date").setValue(terrain.getLastNotificationDate());
if (!terrain.getAssignments().isEmpty()) {
item.getItemProperty("group").setValue(terrain.getAssignments().last().getOwner().getCaption());
}
item.getItemProperty("terrain_id").setValue(terrain.getId());
}
return container;
}
protected IndexedContainer getReportWorkContainer(Map<Terrain, TerrainWorkItem> itemsMap) throws Property.ReadOnlyException {
IndexedContainer container = prepareReportContainer();
for (Terrain terrain : itemsMap.keySet()) {
TerrainWorkItem workItem = itemsMap.get(terrain);
Item item = container.addItem(workItem.getId());
item.getItemProperty("terrain_no").setValue(workItem.getNo());
item.getItemProperty("terrain_name").setValue(workItem.getName() + " (" + workItem.getCount() + "x)");
item.getItemProperty("terrain_type").setValue(workItem.getType());
item.getItemProperty("notification_date").setValue(workItem.getDate());
item.getItemProperty("group").setValue(workItem.getGroup());
item.getItemProperty("terrain_id").setValue(workItem.getId());
}
container.sort(new Object[]{"terrain_no"}, new boolean[]{true});
return container;
}
protected Map<Terrain, TerrainWorkItem> getWorkItemsMap(Set<TerrainNotification> notifications, TerrainType type) {
Map<Terrain, TerrainWorkItem> itemsMap = new HashMap<>();
for (TerrainNotification notification : notifications) {
TerrainAssignment assignment = notification.getAssignment();
Terrain terrain = assignment.getTerrain();
TerrainWorkItem item = new TerrainWorkItem();
item.setDate(notification.getDate());
item.setGroup(assignment.getOwner().getCaption());
item.setId(terrain.getId());
item.setName(terrain.getName());
item.setNo(terrain.getNo());
item.setType(terrain.getType());
if (itemsMap.containsKey(terrain)) {
item.setCount(item.getCount() + itemsMap.get(terrain).getCount());
}
itemsMap.put(terrain, item);
}
return itemsMap;
}
protected Chart prepareReportChart(IndexedContainer container) {
ContainerDataSeries ds = new ContainerDataSeries(container);
ds.setYPropertyId("percent");
ds.setNamePropertyId("label");
ds.addAttributeToPropertyIdMapping("color", "color");
ds.setName(t("status"));
final Chart chart = new Chart();
final Configuration configuration = new Configuration();
configuration.getChart().setType(ChartType.PIE);
configuration.getTitle().setText(t("chart"));
Tooltip tooltip = new Tooltip();
tooltip.setValueDecimals(1);
tooltip.setPointFormat("{point.y}%");
configuration.setTooltip(tooltip);
PlotOptionsPie plotOptions = new PlotOptionsPie();
plotOptions.setAllowPointSelect(true);
plotOptions.setCursor(Cursor.POINTER);
configuration.setPlotOptions(plotOptions);
configuration.setSeries(ds);
chart.drawChart(configuration);
chart.setWidth("450px");
return chart;
}
protected Table prepareReportTable(IndexedContainer container) {
Table table = new Table("", container);
Converter dateConverter = new StringToDateConverter() {
@Override
protected DateFormat getFormat(Locale locale) {
return DateFormat.getDateInstance(DateFormat.MEDIUM, locale);
}
};
table.setConverter("notification_date", dateConverter);
table.setVisibleColumns(new Object[]{"terrain_type", "terrain_no", "terrain_name", "notification_date", "group", "terrain_id"});
table.setColumnHeaders(t(new String[]{"table.type", "table.no", "table.name", "table.last_notification_date", "table.owner", "table.id"}));
table.setWidth("740px");
return table;
}
protected IndexedContainer prepareReportContainer() {
IndexedContainer container = new IndexedContainer();
container.addContainerProperty("terrain_no", Long.class, null);
container.addContainerProperty("terrain_name", String.class, "");
container.addContainerProperty("terrain_type", TerrainType.class, null);
container.addContainerProperty("notification_date", Date.class, null);
container.addContainerProperty("group", String.class, "");
container.addContainerProperty("terrain_id", Long.class, null);
return container;
}
protected ComboBox getGroupsCombo() throws UnsupportedFilterException {
JPAContainer<ServiceGroup> groupsContainer = JPAContainerFactory.make(this.caEntities.getServiceGroupClass(), this.caEntityProviders.getServiceGroupEntityProvider().getEntityManager());
groupsContainer.setEntityProvider(this.caEntityProviders.getServiceGroupEntityProvider());
groupsContainer.addContainerFilter(eq("archival", false));
final ComboBox groups = new ComboBox(t("group"), groupsContainer);
groups.setConverter(new SingleSelectConverter<ServiceGroup>(groups));
groups.setItemCaptionMode(AbstractSelect.ItemCaptionMode.PROPERTY);
groups.setItemCaptionPropertyId("caption");
return groups;
}
protected ComboBox getTypesCombo() {
final ComboBox types = ComponentFactory.createEnumComboBox(t("type"), TerrainType.class);
return types;
}
protected class TerrainWorkItem {
private Long no;
private String name;
private TerrainType type;
private Date date;
private String group;
private Long id;
private Long count = 1l;
public Long getNo() {
return no;
}
public void setNo(Long no) {
this.no = no;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public TerrainType getType() {
return type;
}
public void setType(TerrainType type) {
this.type = type;
}
public Date getDate() {
return date;
}
public void setDate(Date date) {
this.date = date;
}
public String getGroup() {
return group;
}
public void setGroup(String group) {
this.group = group;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Long getCount() {
return count;
}
public void setCount(Long count) {
this.count = count;
}
}
}
|
package org.jboss.as.controller.transform.description;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.transform.TransformationContext;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.ModelType;
/**
* Checks whether an attribute should be discarded or not
*
* @author <a href="kabir.khan@jboss.com">Kabir Khan</a>
*/
public interface DiscardAttributeChecker {
    /**
     * Returns {@code true} if the attribute should be discarded if expressions are used
     *
     * @return whether to discard if expressions are used
     */
    boolean isDiscardExpressions();
    /**
     * Returns {@code true} if the attribute should be discarded if it is undefined
     *
     * @return whether to discard if the attribute is undefined
     */
    boolean isDiscardUndefined();
    /**
     * Gets whether the given operation parameter can be discarded
     *
     * @param address the address of the operation
     * @param attributeName the name of the attribute
     * @param attributeValue the value of the attribute
     * @param operation the operation executed. This is unmodifiable.
     * @param context the context of the transformation
     *
     * @return {@code true} if the operation parameter value should be discarded, {@code false} otherwise.
     */
    boolean isOperationParameterDiscardable(PathAddress address, String attributeName, ModelNode attributeValue, ModelNode operation, TransformationContext context);
    /**
     * Gets whether the given resource attribute can be discarded
     *
     * @param address the address of the resource
     * @param attributeName the name of the attribute
     * @param attributeValue the value of the attribute
     * @param context the context of the transformation
     *
     * @return {@code true} if the attribute value should be discarded, {@code false} otherwise.
     */
    boolean isResourceAttributeDiscardable(PathAddress address, String attributeName, ModelNode attributeValue, TransformationContext context);
    /**
     * A default implementation of DiscardAttributeChecker
     *
     */
    abstract class DefaultDiscardAttributeChecker implements DiscardAttributeChecker {
        protected final boolean discardExpressions;
        protected final boolean discardUndefined;
        /**
         * Constructor
         *
         * @param discardExpressions {@code true} if the attribute should be discarded if expressions are used
         * @param discardUndefined {@code true} if the attribute should be discarded if it is undefined
         */
        protected DefaultDiscardAttributeChecker(final boolean discardExpressions, final boolean discardUndefined) {
            this.discardExpressions = discardExpressions;
            this.discardUndefined = discardUndefined;
        }
        /**
         * Constructor.
         *
         * Sets it up with {@code discardExpressions==false} and {@code discardUndefined==true}
         *
         */
        public DefaultDiscardAttributeChecker() {
            this(false, true);
        }
        /** {@inheritDoc} */
        public boolean isDiscardExpressions() {
            return discardExpressions;
        }
        /** {@inheritDoc} */
        public boolean isDiscardUndefined() {
            return discardUndefined;
        }
        /** {@inheritDoc} */
        @Override
        public boolean isOperationParameterDiscardable(PathAddress address, String attributeName, ModelNode attributeValue, ModelNode operation, TransformationContext context) {
            return isValueDiscardable(address, attributeName, attributeValue, context);
        }
        /** {@inheritDoc} */
        @Override
        public boolean isResourceAttributeDiscardable(PathAddress address, String attributeName, ModelNode attributeValue, TransformationContext context) {
            return isValueDiscardable(address, attributeName, attributeValue, context);
        }
        /**
         * Gets called by the default implementations of {@link #isOperationParameterDiscardable(PathAddress, String, ModelNode, ModelNode, TransformationContext)} and
         * {@link #isResourceAttributeDiscardable(PathAddress, String, ModelNode, TransformationContext)}.
         *
         * @param address the address of the operation or resource
         * @param attributeName the name of the attribute
         * @param attributeValue the value of the attribute
         * @param context the context of the transformation
         *
         * @return {@code true} if the attribute or parameter value should be discarded, {@code false} otherwise.
         */
        protected abstract boolean isValueDiscardable(PathAddress address, String attributeName, ModelNode attributeValue, TransformationContext context);
    }
    /**
     * A standard checker which will discard the attribute always.
     */
    DiscardAttributeChecker ALWAYS = new DefaultDiscardAttributeChecker(true, true) {
        @Override
        public boolean isValueDiscardable(PathAddress address, String attributeName, ModelNode attributeValue, TransformationContext context) {
            // Unconditionally discardable; the expression/undefined flags above
            // already allow both cases.
            return true;
        }
    };
    /**
     * A standard checker which will discard the attribute if it is undefined, as long as it is not an expression
     */
    DiscardAttributeChecker UNDEFINED = new DefaultDiscardAttributeChecker(false, true) {
        @Override
        public boolean isValueDiscardable(PathAddress address, String attributeName, ModelNode attributeValue, TransformationContext context) {
            // Only the discardUndefined flag set in the constructor triggers
            // discarding; defined values are never discardable here.
            return false;
        }
    };
    /**
     * An attribute checker that discards attributes if they are one or more allowed values
     */
    public static class DiscardAttributeValueChecker extends DefaultDiscardAttributeChecker {
        final ModelNode[] values;
        /**
         * Constructor. Discards if the attribute value is either undefined or matches one of the
         * allowed values.
         *
         * @param values the allowed values
         */
        public DiscardAttributeValueChecker(ModelNode...values) {
            super(false, true);
            this.values = values;
        }
        /**
         * Constructor. Discards the attribute value if it matches one of the
         * passed in values.
         *
         * @param discardExpressions {@code true} if the attribute should be discarded if expressions are used
         * @param discardUndefined {@code true} if the attribute should be discarded if it is undefined
         * @param values the allowed values
         */
        public DiscardAttributeValueChecker(boolean discardExpressions, boolean discardUndefined, ModelNode...values) {
            super(discardExpressions, discardUndefined);
            this.values = values;
        }
        @Override
        protected boolean isValueDiscardable(PathAddress address, String attributeName, ModelNode attributeValue,
                TransformationContext context) {
            // Expressions are never matched against the allowed values; the
            // discardExpressions flag governs them separately.
            if (attributeValue.getType() != ModelType.EXPRESSION) {
                for (ModelNode value : values) {
                    if (attributeValue.equals(value)){
                        return true;
                    }
                }
            }
            return false;
        }
    }
}
|
package org.kuali.rice.core.cxf.interceptors;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.message.Message;
import org.apache.cxf.phase.AbstractPhaseInterceptor;
import org.apache.cxf.phase.Phase;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* A CXF Interceptor that binds itself to the USER_LOGICAL phase to be used on inbound
* messages. This interceptor is invoked in the interceptor chain after unmarshalling
* from XML to Java has occurred. The role of this interceptor is to ensure that any
* Collection (and specifically List, Set, or Map) used in a @WebMethod is ultimately of the
* expected immutable type returned from the local service.
*/
@SuppressWarnings("unused")
public class ImmutableCollectionsInInterceptor extends AbstractPhaseInterceptor<Message> {
    /**
     * Instantiates an ImmutableCollectionsInInterceptor and adds it to the USER_LOGICAL phase.
     */
    public ImmutableCollectionsInInterceptor() {
        super(Phase.USER_LOGICAL);
    }

    /**
     * Wraps the collection-typed fields of every object in the unmarshalled message
     * content in unmodifiable views.
     *
     * @param message the inbound message; its {@code List} content may be absent
     * @throws Fault wrapping any reflection failure
     */
    @Override
    public void handleMessage(final Message message) throws Fault {
        try {
            List contents = message.getContent(List.class);
            if (contents == null) {
                // Nothing was unmarshalled for this message; nothing to wrap.
                return;
            }
            for (Object o : contents) {
                makeCollectionFieldsImmutable(o);
            }
        } catch (Exception e) {
            throw new Fault(e);
        }
    }

    /**
     * Replaces the values of declared List/Set/Collection/Map fields of {@code o}
     * with unmodifiable views, substituting the immutable empty instance for null.
     * The runtime type of the value is checked before casting so that, e.g., a field
     * declared as {@code Collection} but holding a {@code Set} is wrapped as a
     * Collection instead of triggering a ClassCastException on the List cast.
     *
     * @param o the object whose declared fields are made immutable
     * @throws IllegalAccessException if a field cannot be read or written
     */
    @SuppressWarnings("unchecked")
    void makeCollectionFieldsImmutable(Object o) throws IllegalAccessException {
        Class<?> targetClass = o.getClass();
        for (Field f : targetClass.getDeclaredFields()) {
            f.setAccessible(true);
            final Class<?> type = f.getType();
            final Object value = f.get(o);
            // Branch on the declared field type, but only cast when the actual
            // value is compatible with the branch's collection interface.
            if (type.isAssignableFrom(List.class) && (value == null || value instanceof List)) {
                f.set(o, Collections.unmodifiableList(value == null ? Collections.emptyList() : (List) value));
            } else if (type.isAssignableFrom(Set.class) && (value == null || value instanceof Set)) {
                f.set(o, Collections.unmodifiableSet(value == null ? Collections.emptySet() : (Set) value));
            } else if (type.isAssignableFrom(Collection.class) && (value == null || value instanceof Collection)) {
                f.set(o, Collections.unmodifiableCollection(value == null ? Collections.emptyList() : (Collection) value));
            } else if (type.isAssignableFrom(Map.class) && (value == null || value instanceof Map)) {
                f.set(o, Collections.unmodifiableMap(value == null ? Collections.emptyMap() : (Map) value));
            }
            f.setAccessible(false);
        }
    }
}
|
package org.hisp.dhis.android.core.dataapproval.internal;
import org.hisp.dhis.android.core.arch.db.stores.internal.ObjectWithoutUidStore;
import org.hisp.dhis.android.core.arch.di.internal.ObjectWithoutUidStoreProvider;
import org.hisp.dhis.android.core.arch.handlers.internal.Handler;
import org.hisp.dhis.android.core.arch.handlers.internal.ObjectWithoutUidHandlerImpl;
import org.hisp.dhis.android.core.arch.repositories.children.internal.ChildrenAppender;
import org.hisp.dhis.android.core.data.database.DatabaseAdapter;
import org.hisp.dhis.android.core.dataapproval.DataApproval;
import java.util.Collections;
import java.util.Map;
import dagger.Module;
import dagger.Provides;
import dagger.Reusable;
@Module
public class DataApprovalEntityDIModule implements ObjectWithoutUidStoreProvider<DataApproval> {
    // Provides the database-backed store used to persist DataApproval rows.
    @Override
    @Provides
    @Reusable
    public ObjectWithoutUidStore<DataApproval> store(DatabaseAdapter databaseAdapter) {
        return DataApprovalStore.create(databaseAdapter);
    }
    // Provides the handler that writes DataApproval objects through the store above.
    @Provides
    @Reusable
    public Handler<DataApproval> handler(ObjectWithoutUidStore<DataApproval> dataApprovalStore) {
        return new ObjectWithoutUidHandlerImpl<>(dataApprovalStore);
    }
    // DataApproval has no child entities, so no children appenders are registered.
    @Provides
    @Reusable
    Map<String, ChildrenAppender<DataApproval>> childrenAppenders() {
        return Collections.emptyMap();
    }
}
|
package org.yecht.ruby;
import org.yecht.BytecodeNodeHandler;
import org.yecht.Bytestring;
import org.yecht.Data;
import org.yecht.IoStrRead;
import org.yecht.JechtIO;
import org.yecht.MapPart;
import org.yecht.Node;
import org.yecht.Parser;
import org.yecht.Pointer;
import org.yecht.ImplicitScanner;
import org.yecht.MapStyle;
import org.yecht.SeqStyle;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyClass;
import org.jruby.RubyEnumerable;
import org.jruby.RubyHash;
import org.jruby.RubyModule;
import org.jruby.RubyNumeric;
import org.jruby.RubyObject;
import org.jruby.RubyString;
import org.jruby.anno.JRubyMethod;
import org.jruby.runtime.Block;
import org.jruby.runtime.BlockCallback;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.runtime.ObjectAllocator;
import org.jruby.util.ByteList;
import org.jruby.util.TypeConverter;
public class YechtYAML {
public static class RubyIoStrRead implements IoStrRead {
private IRubyObject port;
public RubyIoStrRead(IRubyObject port) {
this.port = port;
}
// rb_syck_io_str_read
public int read(Pointer buf, JechtIO.Str str, int max_size, int skip) {
int len = 0;
max_size -= skip;
if(max_size <= 0) {
max_size = 0;
} else {
IRubyObject src = port;
IRubyObject n = RubyNumeric.int2fix(port.getRuntime(), max_size);
IRubyObject str2 = src.callMethod(port.getRuntime().getCurrentContext(), "read", n);
if(!str2.isNil()) {
ByteList res = str2.convertToString().getByteList();
len = res.realSize;
System.arraycopy(res.bytes, res.begin, buf.buffer, buf.start+skip, len);
}
}
len =+ skip;
buf.buffer[buf.start+len] = 0;
return len;
}
}
    // yaml_org_handler
    // Maps a parsed YAML node to its standard (yaml.org) Ruby representation,
    // storing the result in ref[0] and returning true when the node was handled.
    // NOTE(review): unimplemented stub — always reports "not handled".
    public static boolean orgHandler(IRubyObject self, org.yecht.Node n, IRubyObject[] ref) {
        // TODO: implement
        return false;
    }
    // syck_set_model
    // Configures the parser's resolution model from the parser options.
    // NOTE(review): unimplemented stub — input and model are currently ignored.
    public static void setModel(IRubyObject p, IRubyObject input, IRubyObject model) {
        // TODO: implement
    }
// syck_parser_assign_io
public static boolean assignIO(Ruby runtime, Parser parser, IRubyObject[] pport) {
boolean taint = true;
IRubyObject tmp, port = pport[0];
if(!(tmp = port.checkStringType()).isNil()) {
taint = port.isTaint();
port = tmp;
ByteList bl = ((RubyString)port).getByteList();
parser.str(Pointer.create(bl.bytes, bl.begin), bl.realSize, null);
} else if(port.respondsTo("read")) {
if(port.respondsTo("binmode")) {
port.callMethod(runtime.getCurrentContext(), "binmode");
}
parser.str(Pointer.empty(), 0, new RubyIoStrRead(port));
} else {
throw runtime.newTypeError("instance of IO needed");
}
pport[0] = port;
return taint;
}
    public static class Module {
        // rb_syck_compile
        /**
         * Compiles the YAML document read from {@code port} into Yecht bytecode
         * and returns it as a Ruby String terminated by "D\n". The result is
         * tainted when the input was tainted.
         */
        @JRubyMethod(name = "compile", required = 1, module = true)
        public static IRubyObject compile(IRubyObject self, IRubyObject port) {
            Parser parser = Parser.newParser();
            boolean taint = assignIO(self.getRuntime(), parser, new IRubyObject[] {port});
            // Emit bytecode rather than building nodes; no implicit typing or
            // tag URI expansion is wanted for a plain compile.
            parser.handler(new BytecodeNodeHandler());
            parser.errorHandler(null);
            parser.implicitTyping(false);
            parser.taguriExpansion(false);
            long oid = parser.parse();
            Bytestring sav = (Bytestring)parser.lookupSym(oid);
            int len = Bytestring.strlen(sav.buffer);
            ByteList bl = new ByteList(new byte[len+2], false);
            bl.append(sav.buffer, 0, len);
            bl.append('D');
            bl.append('\n');
            IRubyObject iro = RubyString.newStringLight(self.getRuntime(), bl);
            if(taint) iro.setTaint(true);
            return iro;
        }
    }
public static class Resolver {
// syck_const_find
public static IRubyObject const_find(IRubyObject self, IRubyObject const_name) {
RubyModule tclass = self.getRuntime().getObject();
RubyArray tparts = ((RubyString)const_name).split(self.getRuntime().getCurrentContext(), self.getRuntime().newString("::"));
for(int i=0; i < tparts.getLength(); i++) {
String tpart = tparts.entry(i).toString();
if(!tclass.hasConstant(tpart)) {
return self.getRuntime().getNil();
}
tclass = (RubyModule)tclass.getConstant(tpart);
}
return tclass;
}
        // syck_resolver_initialize
        // Initializes the per-resolver tag registry (@tags) as an empty Hash.
        @JRubyMethod
        public static IRubyObject initialize(IRubyObject self) {
            self.getInstanceVariables().setInstanceVariable("@tags", RubyHash.newHash(self.getRuntime()));
            return self;
        }
        // syck_resolver_add_type
        // Registers a transfer target (class or callable) for the given tag URI.
        @JRubyMethod
        public static IRubyObject add_type(IRubyObject self, IRubyObject taguri, IRubyObject cls) {
            IRubyObject tags = self.callMethod(self.getRuntime().getCurrentContext(), "tags");
            ((RubyHash)tags).fastASet(taguri, cls);
            return self.getRuntime().getNil();
        }
        // syck_resolver_use_types_at
        // Replaces the whole tag registry with the supplied hash.
        @JRubyMethod
        public static IRubyObject use_types_at(IRubyObject self, IRubyObject hsh) {
            self.getInstanceVariables().setInstanceVariable("@tags", hsh);
            return self.getRuntime().getNil();
        }
        // syck_resolver_detect_implicit
        // Base implementation: performs no implicit type detection and always
        // returns an empty tag string; subclasses override this.
        @JRubyMethod
        public static IRubyObject detect_implicit(IRubyObject self, IRubyObject val) {
            return RubyString.newEmptyString(self.getRuntime());
        }
        // syck_resolver_transfer
        /**
         * Applies a YAML tag to a loaded value: resolves the class registered for
         * the tag (including colon-separated subclass lookup), builds the target
         * object via "call"/"yaml_new"/allocation, and falls back to
         * PrivateType/DomainType wrappers for unregistered tags.
         */
        @JRubyMethod
        public static IRubyObject transfer(IRubyObject self, IRubyObject type, IRubyObject val) {
            final Ruby runtime = self.getRuntime();
            ThreadContext ctx = runtime.getCurrentContext();
            // An absent/empty tag falls back to implicit detection.
            if(type.isNil() || type.convertToString().getByteList().realSize == 0) {
                type = self.callMethod(ctx, "detect_implicit", val);
            }
            if(!(type.isNil() || type.convertToString().getByteList().realSize == 0)) {
                IRubyObject colon = runtime.newString(":");
                IRubyObject tags = self.callMethod(ctx, "tags");
                IRubyObject target_class = ((RubyHash)tags).fastARef(type);
                IRubyObject subclass = target_class;
                IRubyObject obj = runtime.getNil();
                if(target_class.isNil()) {
                    // No exact registration: strip trailing ":part" segments until a
                    // registered prefix is found; the stripped parts name a subclass.
                    RubyArray subclass_parts = runtime.newArray();
                    RubyArray parts = ((RubyString)type).split(ctx, colon);
                    while(parts.getLength() > 1) {
                        subclass_parts.unshift(parts.pop(ctx));
                        IRubyObject partial = parts.join(ctx, colon);
                        target_class = ((RubyHash)tags).fastARef(partial);
                        if(target_class.isNil()) {
                            // Also try the prefix with a trailing colon.
                            ((RubyString)partial).append(colon);
                            target_class = ((RubyHash)tags).fastARef(partial);
                        }
                        if(!target_class.isNil()) {
                            subclass = target_class;
                            if(subclass_parts.getLength() > 0 && target_class.respondsTo("yaml_tag_subclasses?") && target_class.callMethod(ctx, "yaml_tag_subclasses?").isTrue()) {
                                subclass = subclass_parts.join(ctx, colon);
                                subclass = target_class.callMethod(ctx, "yaml_tag_read_class", subclass);
                                IRubyObject subclass_v = const_find(self, subclass);
                                if(subclass_v != runtime.getNil()) {
                                    subclass = subclass_v;
                                } else if(target_class == runtime.getObject() && subclass_v == runtime.getNil()) {
                                    // Unknown subclass of Object: wrap as YAML::Object.
                                    target_class = ((RubyModule)runtime.getModule("YAML")).getConstant("Object");
                                    type = subclass;
                                    subclass = target_class;
                                } else {
                                    throw runtime.newTypeError("invalid subclass");
                                }
                            }
                            break;
                        }
                    }
                }
                // Build the object: a callable registration wins, then yaml_new,
                // then plain allocation + yaml_initialize / ivar copying.
                if(target_class.respondsTo("call")) {
                    obj = target_class.callMethod(ctx, "call", new IRubyObject[]{type, val});
                } else {
                    if(target_class.respondsTo("yaml_new")) {
                        obj = target_class.callMethod(ctx, "yaml_new", new IRubyObject[]{subclass, type, val});
                    } else if(!target_class.isNil()) {
                        if(subclass == runtime.getBignum()) {
                            obj = RubyNumeric.str2inum(runtime, val.convertToString(), 10);
                        } else {
                            obj = ((RubyClass)subclass).allocate();
                        }
                        if(obj.respondsTo("yaml_initialize")) {
                            obj.callMethod(ctx, "yaml_initialize", new IRubyObject[]{type, val});
                        } else if(!obj.isNil() && val instanceof RubyHash) {
                            // Copy each hash pair into an instance variable "@<key>".
                            final IRubyObject _obj = obj;
                            RubyEnumerable.callEach(runtime, ctx, val, new BlockCallback() {
                                public IRubyObject call(ThreadContext _ctx, IRubyObject[] largs, Block blk) {
                                    IRubyObject ivname = ((RubyArray)largs[0]).entry(0);
                                    String ivn = "@" + ivname.convertToString().toString();
                                    _obj.getInstanceVariables().setInstanceVariable(ivn, ((RubyArray)largs[0]).entry(1));
                                    return runtime.getNil();
                                }
                            });
                        }
                    } else {
                        // Unregistered tag: wrap in PrivateType ("x-private:...") or DomainType.
                        RubyArray parts = ((RubyString)type).split(ctx, colon);
                        IRubyObject scheme = parts.shift(ctx);
                        if(scheme.convertToString().toString().equals("x-private")) {
                            IRubyObject name = parts.join(ctx, colon);
                            obj = ((RubyModule)((RubyModule)runtime.getModule("YAML")).getConstant("Yecht")).getConstant("PrivateType").callMethod(ctx, "new", new IRubyObject[]{name, val});
                        } else {
                            IRubyObject domain = parts.shift(ctx);
                            IRubyObject name = parts.join(ctx, colon);
                            obj = ((RubyModule)((RubyModule)runtime.getModule("YAML")).getConstant("Yecht")).getConstant("DomainType").callMethod(ctx, "new", new IRubyObject[]{domain, name, val});
                        }
                    }
                }
                val = obj;
            }
            return val;
        }
        // syck_resolver_node_import
        /**
         * Converts a parsed node (scalar/sequence/mapping) into the corresponding
         * Ruby object, honoring MergeKey and DefaultKey entries in mappings, then
         * runs "transfer" when the node carries a tag.
         */
        @JRubyMethod
        public static IRubyObject node_import(IRubyObject self, IRubyObject node) {
            final Ruby runtime = self.getRuntime();
            final ThreadContext ctx = runtime.getCurrentContext();
            org.yecht.Node n = (org.yecht.Node)node.dataGetStruct();
            Parser parser = n.parser;
            IRubyObject obj = null;
            switch(n.kind) {
            case Str:
                Data.Str dd = (Data.Str)n.data;
                obj = RubyString.newStringShared(runtime, dd.ptr.buffer, dd.ptr.start, dd.len);
                break;
            case Seq:
                Data.Seq ds = (Data.Seq)n.data;
                obj = RubyArray.newArray(runtime, ds.idx);
                for(int i = 0; i < ds.idx; i++) {
                    ((RubyArray)obj).store(i, (IRubyObject)parser.lookupSym(n.seqRead(i)));
                }
                break;
            case Map:
                Data.Map dm = (Data.Map)n.data;
                obj = RubyHash.newHash(runtime);
                RubyClass cMergeKey = (RubyClass)(((RubyModule)((RubyModule)runtime.getModule("YAML")).getConstant("Yecht")).getConstant("MergeKey"));
                RubyClass cDefaultKey = (RubyClass)(((RubyModule)((RubyModule)runtime.getModule("YAML")).getConstant("Yecht")).getConstant("DefaultKey"));
                RubyClass cHash = runtime.getHash();
                RubyClass cArray = runtime.getArray();
                for(int i = 0; i < dm.idx; i++) {
                    IRubyObject k = (IRubyObject)parser.lookupSym(n.mapRead(MapPart.Key, i));
                    IRubyObject v = (IRubyObject)parser.lookupSym(n.mapRead(MapPart.Value, i));
                    boolean skip_aset = false;
                    if(cMergeKey.isInstance(k)) {
                        // A MergeKey merges a hash (or an array of hashes) into obj,
                        // with obj's own entries taking precedence.
                        if(cHash.isInstance(v)) {
                            IRubyObject dup = v.callMethod(ctx, "dup");
                            dup.callMethod(ctx, "update", obj);
                            obj = dup;
                            skip_aset = true;
                        } else if(cArray.isInstance(v)) {
                            IRubyObject end = ((RubyArray)v).pop(ctx);
                            if(cHash.isInstance(end)) {
                                final IRubyObject dup = end.callMethod(ctx, "dup");
                                // Later hashes in the sequence win; obj's entries win last.
                                v = ((RubyArray)v).reverse();
                                ((RubyArray)v).append(obj);
                                RubyEnumerable.callEach(runtime, ctx, v, new BlockCallback() {
                                    // syck_merge_i
                                    public IRubyObject call(ThreadContext _ctx, IRubyObject[] largs, Block blk) {
                                        IRubyObject entry = largs[0];
                                        IRubyObject tmp = null;
                                        if(!(tmp = TypeConverter.convertToTypeWithCheck(entry, runtime.getHash(), "to_hash")).isNil()) {
                                            dup.callMethod(_ctx, "update", tmp);
                                        }
                                        return runtime.getNil();
                                    }
                                });
                                obj = dup;
                                skip_aset = true;
                            }
                        }
                    } else if(cDefaultKey.isInstance(k)) {
                        // DefaultKey sets the hash's default value rather than an entry.
                        obj.callMethod(ctx, "default=", v);
                        skip_aset = true;
                    }
                    if(!skip_aset) {
                        ((RubyHash)obj).fastASet(k, v);
                    }
                }
                break;
            }
            if(n.type_id != null) {
                obj = self.callMethod(ctx, "transfer", new IRubyObject[]{runtime.newString(n.type_id), obj});
            }
            return obj;
        }
// syck_resolver_tagurize
@JRubyMethod
public static IRubyObject tagurize(IRubyObject self, IRubyObject val) {
IRubyObject tmp = val.checkStringType();
if(!tmp.isNil()) {
String taguri = ImplicitScanner.typeIdToUri(tmp.toString());
val = self.getRuntime().newString(taguri);
}
return val;
}
}
    public static class DefaultResolver {
        // syck_defaultresolver_node_import
        /**
         * Imports a node by first trying the built-in yaml.org handler; when that
         * declines, falls back to tag-based "transfer" resolution.
         */
        @JRubyMethod
        public static IRubyObject node_import(IRubyObject self, IRubyObject node) {
            org.yecht.Node n = (org.yecht.Node)node.dataGetStruct();
            IRubyObject[] _obj = new IRubyObject[]{null};
            if(!orgHandler(self, n, _obj)) {
                _obj[0] = self.callMethod(self.getRuntime().getCurrentContext(), "transfer", new IRubyObject[]{self.getRuntime().newString(n.type_id), _obj[0]});
            }
            return _obj[0];
        }
        // syck_defaultresolver_detect_implicit
        // Returns the type id matched by the implicit scanner for string input,
        // or an empty string when the value is not string-like.
        @JRubyMethod
        public static IRubyObject detect_implicit(IRubyObject self, IRubyObject val) {
            IRubyObject tmp = TypeConverter.convertToTypeWithCheck(val, self.getRuntime().getString(), "to_str");
            if(!tmp.isNil()) {
                ByteList bl = ((RubyString)tmp).getByteList();
                String type_id = ImplicitScanner.matchImplicit(Pointer.create(bl.bytes, bl.begin), bl.realSize);
                return self.getRuntime().newString(type_id);
            }
            return RubyString.newEmptyString(self.getRuntime());
        }
    }
    public static class GenericResolver {
        // syck_genericresolver_node_import
        /**
         * Builds a YAML::Yecht Scalar/Seq/Map wrapper for the parsed node,
         * preserving its tag (t), value (v) and presentation style.
         */
        @JRubyMethod
        public static IRubyObject node_import(IRubyObject self, IRubyObject node) {
            Ruby runtime = self.getRuntime();
            ThreadContext ctx = runtime.getCurrentContext();
            org.yecht.Node n = (org.yecht.Node)node.dataGetStruct();
            Parser parser = n.parser;
            IRubyObject t = runtime.getNil();
            IRubyObject obj = t;
            IRubyObject v = t;
            IRubyObject style = t;
            if(n.type_id != null) {
                t = runtime.newString(n.type_id);
            }
            switch(n.kind) {
            case Str:
                Data.Str dd = (Data.Str)n.data;
                v = RubyString.newStringShared(runtime, dd.ptr.buffer, dd.ptr.start, dd.len);
                // Translate the scalar quoting style into the symbol used Ruby-side.
                switch(dd.style) {
                    case OneQuote:
                        style = runtime.newSymbol("quote1");
                        break;
                    case TwoQuote:
                        style = runtime.newSymbol("quote2");
                        break;
                    case Fold:
                        style = runtime.newSymbol("fold");
                        break;
                    case Literal:
                        style = runtime.newSymbol("literal");
                        break;
                    case Plain:
                        style = runtime.newSymbol("plain");
                        break;
                }
                obj = ((RubyModule)((RubyModule)runtime.getModule("YAML")).getConstant("Yecht")).getConstant("Scalar").callMethod(ctx, "new", new IRubyObject[]{t, v, style});
                break;
            case Seq:
                v = RubyArray.newArray(runtime, n.seqCount());
                for(int i = 0; i < n.seqCount(); i++) {
                    ((RubyArray)v).store(i, (IRubyObject)parser.lookupSym(n.seqRead(i)));
                }
                if(((Data.Seq)n.data).style == SeqStyle.Inline) {
                    style = runtime.newSymbol("inline");
                }
                obj = ((RubyModule)((RubyModule)runtime.getModule("YAML")).getConstant("Yecht")).getConstant("Seq").callMethod(ctx, "new", new IRubyObject[]{t, v, style});
                obj.getInstanceVariables().setInstanceVariable("@kind", runtime.newSymbol("seq"));
                break;
            case Map:
                v = RubyHash.newHash(runtime);
                for(int i = 0; i < n.mapCount(); i++) {
                    ((RubyHash)v).fastASet((IRubyObject)parser.lookupSym(n.mapRead(MapPart.Key, i)), (IRubyObject)parser.lookupSym(n.mapRead(MapPart.Value, i)));
                }
                if(((Data.Map)n.data).style == MapStyle.Inline) {
                    style = runtime.newSymbol("inline");
                }
                obj = ((RubyModule)((RubyModule)runtime.getModule("YAML")).getConstant("Yecht")).getConstant("Map").callMethod(ctx, "new", new IRubyObject[]{t, v, style});
                obj.getInstanceVariables().setInstanceVariable("@kind", runtime.newSymbol("map"));
                break;
            }
            return obj;
        }
    }
    public static class YParser {
        // Per-parser state carried alongside the native parser object.
        public static class Extra {
            public IRubyObject data;     // scratch hash used during a load
            public IRubyObject proc;     // optional per-document callback, or null
            public IRubyObject resolver; // resolver used to transfer tagged nodes
            public boolean taint;        // whether loaded objects should be tainted
        }
        public static final ObjectAllocator Allocator = new ObjectAllocator() {
            // syck_parser_s_alloc
            public IRubyObject allocate(Ruby runtime, RubyClass klass) {
                Parser parser = Parser.newParser();
                parser.bonus = new Extra();
                IRubyObject pobj = new RubyObject(runtime, klass);
                pobj.dataWrapStruct(parser);
                // The parse result on error defaults to nil.
                parser.setRootOnError(parser.addSym(runtime.getNil()));
                return pobj;
            }
        };
        // Initializes @options (from the optional argument hash), @input and @resolver.
        @JRubyMethod(optional = 1)
        public static IRubyObject initialize(IRubyObject self, IRubyObject[] args) {
            IRubyObject options = null;
            if(args.length == 0) {
                options = RubyHash.newHash(self.getRuntime());
            } else {
                options = args[0].convertToHash();
            }
            self.getInstanceVariables().setInstanceVariable("@options", options);
            self.getInstanceVariables().setInstanceVariable("@input", self.getRuntime().getNil());
            self.getInstanceVariables().setInstanceVariable("@resolver", self.getRuntime().getNil());
            return self;
        }
        // syck_parser_bufsize_set
        // Sets the parser read-buffer size; ignored unless size responds to to_i.
        @JRubyMethod(name="bufsize=")
        public static IRubyObject bufsize_set(IRubyObject self, IRubyObject size) {
            if(size.respondsTo("to_i")) {
                int n = RubyNumeric.fix2int(size.callMethod(self.getRuntime().getCurrentContext(), "to_i"));
                Parser p = (Parser)self.dataGetStruct();
                p.bufsize = n;
            }
            return self;
        }
        // syck_parser_bufsize_get
        @JRubyMethod
        public static IRubyObject bufsize(IRubyObject self) {
            Parser p = (Parser)self.dataGetStruct();
            return self.getRuntime().newFixnum(p.bufsize);
        }
        // syck_parser_load
        // Parses one document from port (String or IO-like) and returns the
        // resolved root object; an optional proc is stored for callbacks.
        @JRubyMethod(required = 1, optional = 1)
        public static IRubyObject load(IRubyObject self, IRubyObject[] args) {
            Ruby runtime = self.getRuntime();
            ThreadContext ctx = runtime.getCurrentContext();
            IRubyObject port = args[0];
            IRubyObject proc = null;
            if(args.length > 1) {
                proc = args[1];
            } else {
                proc = runtime.getNil();
            }
            IRubyObject input = ((RubyHash)self.callMethod(ctx, "options")).fastARef(runtime.newSymbol("input"));
            IRubyObject model = ((RubyHash)self.callMethod(ctx, "options")).fastARef(runtime.newSymbol("Model"));
            Parser parser = (Parser)self.dataGetStruct();
            setModel(self, input, model);
            Extra bonus = (Extra)parser.bonus;
            bonus.taint = assignIO(runtime, parser, new IRubyObject[]{port});
            bonus.data = RubyHash.newHash(runtime);
            bonus.resolver = self.callMethod(ctx, "resolver");
            if(proc.isNil()) {
                bonus.proc = null;
            } else {
                bonus.proc = proc;
            }
            return (IRubyObject)parser.lookupSym(parser.parse());
        }
        // rb_define_method(cParser, "load_documents", syck_parser_load_documents, -1);
        // rb_define_method(cParser, "set_resolver", syck_parser_set_resolver, 1);
    }
    public static class Node {
        // Placeholder for the Ruby Node class; the original C bindings listed
        // below have not been ported yet.
        // rb_define_method( cNode, "initialize_copy", syck_node_init_copy, 1 );
        // rb_define_method( cNode, "type_id=", syck_node_type_id_set, 1 );
        // rb_define_method( cNode, "transform", syck_node_transform, 0);
    }
    public static class Scalar {
        public static final ObjectAllocator Allocator = new ObjectAllocator() {
            // syck_scalar_alloc
            // NOTE(review): unimplemented stub — returns null instead of an object.
            public IRubyObject allocate(Ruby runtime, RubyClass klass) {
                // TODO: implement
                return null;
            }
        };
        // Remaining bindings from the original C extension, not yet ported:
        // rb_define_method( cScalar, "initialize", syck_scalar_initialize, 3 );
        // rb_define_method( cScalar, "value=", syck_scalar_value_set, 1 );
        // rb_define_method( cScalar, "style=", syck_scalar_style_set, 1 );
    }
    public static class Seq {
        public static final ObjectAllocator Allocator = new ObjectAllocator() {
            // syck_seq_alloc
            // NOTE(review): unimplemented stub — returns null instead of an object.
            public IRubyObject allocate(Ruby runtime, RubyClass klass) {
                // TODO: implement
                return null;
            }
        };
        // Remaining bindings from the original C extension, not yet ported:
        // rb_define_method( cSeq, "initialize", syck_seq_initialize, 3 );
        // rb_define_method( cSeq, "value=", syck_seq_value_set, 1 );
        // rb_define_method( cSeq, "add", syck_seq_add_m, 1 );
        // rb_define_method( cSeq, "style=", syck_seq_style_set, 1 );
    }
    public static class Map {
        public static final ObjectAllocator Allocator = new ObjectAllocator() {
            // syck_map_alloc
            // NOTE(review): unimplemented stub — returns null instead of an object.
            public IRubyObject allocate(Ruby runtime, RubyClass klass) {
                // TODO: implement
                return null;
            }
        };
        // Remaining bindings from the original C extension, not yet ported:
        // rb_define_method( cMap, "initialize", syck_map_initialize, 3 );
        // rb_define_method( cMap, "value=", syck_map_value_set, 1 );
        // rb_define_method( cMap, "add", syck_map_add_m, 2 );
        // rb_define_method( cMap, "style=", syck_map_style_set, 1 );
    }
    public static class PrivateType {
        // Placeholder; original C binding not yet ported:
        // rb_define_method( cPrivateType, "initialize", syck_privatetype_initialize, 2);
    }
    public static class DomainType {
        // Placeholder; original C binding not yet ported:
        // rb_define_method( cDomainType, "initialize", syck_domaintype_initialize, 3);
    }
    public static class YObject {
        // Placeholder; original C bindings not yet ported:
        // rb_define_method( cYObject, "initialize", syck_yobject_initialize, 2);
        // rb_define_method( cYObject, "yaml_initialize", syck_yobject_initialize, 2);
    }
    public static class BadAlias {
        // Placeholder; original C bindings not yet ported:
        // rb_define_method( cBadAlias, "initialize", syck_badalias_initialize, 1);
        // rb_define_method( cBadAlias, "<=>", syck_badalias_cmp, 1);
    }
    public static class Out {
        // Placeholder; original C bindings not yet ported:
        // rb_define_method( cOut, "initialize", syck_out_initialize, 1 );
        // rb_define_method( cOut, "map", syck_out_map, -1 );
        // rb_define_method( cOut, "seq", syck_out_seq, -1 );
        // rb_define_method( cOut, "scalar", syck_out_scalar, -1 );
    }
    public static class Emitter {
        public static final ObjectAllocator Allocator = new ObjectAllocator() {
            // syck_emitter_s_alloc
            // NOTE(review): unimplemented stub — returns null instead of an object.
            public IRubyObject allocate(Ruby runtime, RubyClass klass) {
                // TODO: implement
                return null;
            }
        };
        // Remaining bindings from the original C extension, not yet ported:
        // rb_define_method( cEmitter, "initialize", syck_emitter_reset, -1 );
        // rb_define_method( cEmitter, "reset", syck_emitter_reset, -1 );
        // rb_define_method( cEmitter, "emit", syck_emitter_emit, -1 );
        // rb_define_method( cEmitter, "set_resolver", syck_emitter_set_resolver, 1);
        // rb_define_method( cEmitter, "node_export", syck_emitter_node_export, 1);
    }
}
|
package at.favre.lib.hood.interfaces;
/**
 * Immutable configuration for the debug view. Obtain instances through
 * {@link #newBuilder()}; all fields are public and final.
 */
public final class Config {
    private static final String TAG = Config.class.getName();

    /** Whether the UI shows a zebra pattern (every other row highlighted). */
    public final boolean showZebra;
    /** Whether the content area is highlighted with the zebra color. */
    public final boolean showHighlightContent;
    /** Whether the page content is automatically logged when the view is instantiated. */
    public final boolean autoLog;
    /** Whether the view refreshes itself periodically. */
    public final boolean autoRefresh;
    /** Interval between automatic refreshes, in milliseconds. */
    public final long autoRefreshIntervalMs;
    /** Tag used when logging debug data to the console. */
    public final String logTag;
    /** Whether the view-pager page indicator is shown (only relevant with multiple pages). */
    public final boolean showPagesIndicator;

    private Config(boolean showZebra, boolean showHighlightContent, boolean autoLog, boolean autoRefresh, long autoRefreshIntervalMs, String logTag, boolean showPagesIndicator) {
        this.showZebra = showZebra;
        this.showHighlightContent = showHighlightContent;
        this.autoLog = autoLog;
        this.autoRefresh = autoRefresh;
        this.autoRefreshIntervalMs = autoRefreshIntervalMs;
        this.logTag = logTag;
        this.showPagesIndicator = showPagesIndicator;
    }

    /**
     * Creates a builder preset with the default configuration.
     *
     * @return a new {@link Builder}
     */
    public static Builder newBuilder() {
        return new Builder();
    }

    /** Builder for {@link Config}; every setter returns this builder for chaining. */
    public static class Builder {
        private boolean showZebra = true;
        private boolean showHighlightContent = false;
        private boolean autoLog = true;
        private boolean autoRefresh = false;
        private long autoRefreshIntervalMs = 10_000;
        private String logTag = TAG;
        private boolean showPagesIndicator = true;

        private Builder() {
        }

        /**
         * UI showing zebra pattern (highlighting every other row).
         *
         * @param showZebra true to enable the zebra pattern (default: true)
         * @return this builder
         */
        public Builder setShowZebra(boolean showZebra) {
            this.showZebra = showZebra;
            return this;
        }

        /**
         * UI highlighting content area with zebra color.
         *
         * @param showHighlightContent true to highlight the content area (default: false)
         * @return this builder
         */
        public Builder setShowHighlightContent(boolean showHighlightContent) {
            this.showHighlightContent = showHighlightContent;
            return this;
        }

        /**
         * If true will automatically log the whole page content to console when the view
         * is instantiated.
         *
         * @param autoLogEnabled true to enable auto-logging (default: true)
         * @return this builder
         */
        public Builder setAutoLog(boolean autoLogEnabled) {
            this.autoLog = autoLogEnabled;
            return this;
        }

        /**
         * If true will auto refresh the view using the default interval.
         *
         * @param autoRefreshEnabled true to enable auto-refresh (default: false)
         * @return this builder
         */
        public Builder setAutoRefresh(boolean autoRefreshEnabled) {
            this.autoRefresh = autoRefreshEnabled;
            return this;
        }

        /**
         * If true will auto refresh the view. This call can set the refresh interval in ms;
         * values below 300 ms are clamped to 300 ms.
         *
         * @param autoRefreshEnabled true to enable auto-refresh
         * @param intervalMs time between refreshes in ms (min 300)
         * @return this builder
         */
        public Builder setAutoRefresh(boolean autoRefreshEnabled, long intervalMs) {
            this.autoRefresh = autoRefreshEnabled;
            this.autoRefreshIntervalMs = Math.max(300, intervalMs);
            return this;
        }

        /**
         * Sets the tag which is used to log debug data to console
         * (see {@link android.util.Log#w(String, String)}).
         *
         * @param logTag the log tag to use (default: this class' name)
         * @return this builder
         */
        public Builder setLogTag(String logTag) {
            this.logTag = logTag;
            return this;
        }

        /**
         * Enables or disables the viewpager page indicator (only visible if more than one page).
         *
         * @param showPagesIndicator true to show the indicator (default: true)
         * @return this builder
         */
        public Builder setShowPagesIndicatorOnMultiplePages(boolean showPagesIndicator) {
            this.showPagesIndicator = showPagesIndicator;
            return this;
        }

        /**
         * Builds the immutable configuration.
         *
         * @return a new {@link Config} with the values set on this builder
         */
        public Config build() {
            return new Config(showZebra, showHighlightContent, autoLog, autoRefresh, autoRefreshIntervalMs, logTag, showPagesIndicator);
        }
    }
}
|
package jolie.net;
import com.google.gwt.user.client.rpc.SerializationException;
import com.google.gwt.user.server.rpc.RPC;
import com.google.gwt.user.server.rpc.RPCRequest;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URI;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import jolie.Interpreter;
import jolie.lang.NativeType;
import jolie.net.http.HttpMessage;
import jolie.net.http.HttpParser;
import jolie.net.http.HttpUtils;
import jolie.net.http.json.JsonUtils;
import joliex.gwt.server.JolieGWTConverter;
import jolie.net.http.Method;
import jolie.net.http.MultiPartFormDataParser;
import jolie.net.ports.Interface;
import jolie.net.protocols.CommProtocol;
import jolie.runtime.ByteArray;
import jolie.runtime.Value;
import jolie.runtime.ValueVector;
import jolie.runtime.VariablePath;
import jolie.runtime.typing.OneWayTypeDescription;
import jolie.runtime.typing.RequestResponseTypeDescription;
import jolie.runtime.typing.Type;
import jolie.runtime.typing.TypeCastingException;
import jolie.util.LocationParser;
import jolie.xml.XmlUtils;
import joliex.gwt.client.JolieService;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* HTTP protocol implementation
* @author Fabrizio Montesi
*/
public class HttpProtocol extends CommProtocol
{
	// Raw status line bytes for responses to unsupported requests.
	private static final byte[] NOT_IMPLEMENTED_HEADER = "HTTP/1.1 501 Not Implemented".getBytes();
	//private static final byte[] INTERNAL_SERVER_ERROR_HEADER = "HTTP/1.1 500 Internal Server error".getBytes();
private static class Parameters {
private static String DEBUG = "debug";
private static String COOKIES = "cookies";
private static String METHOD = "method";
private static String ALIAS = "alias";
private static String MULTIPART_HEADERS = "multipartHeaders";
private static String CONCURRENT = "concurrent";
private static class MultiPartHeaders {
private static String FILENAME = "filename";
}
}
private static class Headers {
private static String JOLIE_MESSAGE_ID = "X-Jolie-MessageID";
}
	// NOTE(review): assigned during message handling elsewhere in this class; null until then.
	private String inputId = null;
	private final Transformer transformer; // serializes DOM documents (XML declaration omitted)
	private final DocumentBuilderFactory docBuilderFactory;
	private final DocumentBuilder docBuilder; // parses XML message bodies
	private final URI uri; // location of the communication port
	private final boolean inInputPort; // true when this protocol is used inside an input port
	private MultiPartFormDataParser multiPartFormDataParser = null;
	// HTTP line terminator "\r\n".
	public final static String CRLF = new String( new char[] { 13, 10 } );
	// Protocol identifier ("http").
	public String name()
	{
		return "http";
	}
	// The protocol instance is treated as thread safe only when the
	// "concurrent" configuration parameter is enabled.
	public boolean isThreadSafe()
	{
		return checkBooleanParameter( Parameters.CONCURRENT );
	}
	/**
	 * Constructs the protocol instance.
	 *
	 * @param configurationPath path of the protocol configuration tree
	 * @param uri location URI of the port this protocol is attached to
	 * @param inInputPort true when the protocol is used inside an input port
	 * @param transformerFactory factory used to create the XML transformer
	 * @param docBuilderFactory factory for XML document builders
	 * @param docBuilder builder used to parse XML message bodies
	 * @throws TransformerConfigurationException if the transformer cannot be created
	 */
	public HttpProtocol(
		VariablePath configurationPath,
		URI uri,
		boolean inInputPort,
		TransformerFactory transformerFactory,
		DocumentBuilderFactory docBuilderFactory,
		DocumentBuilder docBuilder
	)
		throws TransformerConfigurationException
	{
		super( configurationPath );
		this.uri = uri;
		this.inInputPort = inInputPort;
		this.transformer = transformerFactory.newTransformer();
		this.docBuilderFactory = docBuilderFactory;
		this.docBuilder = docBuilder;
		// The XML declaration is omitted because documents are embedded in HTTP bodies.
		transformer.setOutputProperty( OutputKeys.OMIT_XML_DECLARATION, "yes" );
	}
	/**
	 * Recursively converts a Jolie value tree into DOM children of {@code node}:
	 * the value's string content becomes a text node, children whose name starts
	 * with '@' are skipped, and every other child becomes an element with its
	 * attributes (as extracted by XmlUtils) applied.
	 */
	private void valueToDocument(
		Value value,
		Node node,
		Document doc
	)
	{
		node.appendChild( doc.createTextNode( value.strValue() ) );
		Element currentElement;
		for( Entry< String, ValueVector > entry : value.children().entrySet() ) {
			if ( !entry.getKey().startsWith( "@" ) ) {
				for( Value val : entry.getValue() ) {
					currentElement = doc.createElement( entry.getKey() );
					node.appendChild( currentElement );
					Map< String, ValueVector > attrs = jolie.xml.XmlUtils.getAttributesOrNull( val );
					if ( attrs != null ) {
						for( Entry< String, ValueVector > attrEntry : attrs.entrySet() ) {
							currentElement.setAttribute(
								attrEntry.getKey(),
								attrEntry.getValue().first().strValue()
							);
						}
					}
					// Recurse into the child value to build its subtree.
					valueToDocument( val, currentElement, doc );
				}
			}
		}
	}
public String getMultipartHeaderForPart( String operationName, String partName )
{
if ( hasOperationSpecificParameter( operationName, Parameters.MULTIPART_HEADERS ) ) {
Value v = getOperationSpecificParameterFirstValue( operationName, Parameters.MULTIPART_HEADERS );
if ( v.hasChildren( partName ) ) {
v = v.getFirstChild( partName );
if ( v.hasChildren( Parameters.MultiPartHeaders.FILENAME ) ) {
v = v.getFirstChild( Parameters.MultiPartHeaders.FILENAME );
return v.strValue();
}
}
}
return null;
}
	// Fixed boundary string used when serializing multipart bodies.
	private final static String BOUNDARY = "----Jol13H77p$$Bound4r1$$";
	/**
	 * Appends a "Cookie:" request header for every configured cookie whose
	 * configured domain is empty or is a suffix of the target hostname; the
	 * cookie value is taken from the message child named by the configuration.
	 */
	private void send_appendCookies( CommMessage message, String hostname, StringBuilder headerBuilder )
	{
		// Operation-specific cookie configuration takes precedence over the port-wide one.
		Value cookieParam = null;
		if ( hasOperationSpecificParameter( message.operationName(), Parameters.COOKIES ) ) {
			cookieParam = getOperationSpecificParameterFirstValue( message.operationName(), Parameters.COOKIES );
		} else if ( hasParameter( Parameters.COOKIES ) ) {
			cookieParam = getParameterFirstValue( Parameters.COOKIES );
		}
		if ( cookieParam != null ) {
			Value cookieConfig;
			String domain;
			StringBuilder cookieSB = new StringBuilder();
			for( Entry< String, ValueVector > entry : cookieParam.children().entrySet() ) {
				cookieConfig = entry.getValue().first();
				if ( message.value().hasChildren( cookieConfig.strValue() ) ) {
					domain = cookieConfig.hasChildren( "domain" ) ? cookieConfig.getFirstChild( "domain" ).strValue() : "";
					if ( domain.isEmpty() || hostname.endsWith( domain ) ) {
						cookieSB
							.append( entry.getKey() )
							.append( '=' )
							.append( message.value().getFirstChild( cookieConfig.strValue() ).strValue() )
							.append( ";" );
					}
				}
			}
			// Only emit the header when at least one cookie matched.
			if ( cookieSB.length() > 0 ) {
				headerBuilder
					.append( "Cookie: " )
					.append( cookieSB )
					.append( CRLF );
			}
		}
	}
	/**
	 * Appends one "Set-Cookie:" response header per configured cookie whose
	 * mapped child value is present in the outgoing message. Cookie
	 * configuration is taken from the operation-specific cookies parameter
	 * when present, otherwise from the port-level one. The expires, domain
	 * and path attributes are emitted (empty when not configured), and
	 * "; secure" is added when the secure child is a positive integer.
	 */
	private void send_appendSetCookieHeader( CommMessage message, StringBuilder headerBuilder )
	{
		Value cookieParam = null;
		if ( hasOperationSpecificParameter( message.operationName(), Parameters.COOKIES ) ) {
			cookieParam = getOperationSpecificParameterFirstValue( message.operationName(), Parameters.COOKIES );
		} else if ( hasParameter( Parameters.COOKIES ) ) {
			cookieParam = getParameterFirstValue( Parameters.COOKIES );
		}
		if ( cookieParam != null ) {
			Value cookieConfig;
			for( Entry< String, ValueVector > entry : cookieParam.children().entrySet() ) {
				cookieConfig = entry.getValue().first();
				// The config's string value names the message child holding the cookie value.
				if ( message.value().hasChildren( cookieConfig.strValue() ) ) {
					headerBuilder
						.append( "Set-Cookie: " )
						.append( entry.getKey() ).append( '=' )
						.append( message.value().getFirstChild( cookieConfig.strValue() ).strValue() )
						.append( "; expires=" )
						.append( cookieConfig.hasChildren( "expires" ) ? cookieConfig.getFirstChild( "expires" ).strValue() : "" )
						.append( "; domain=" )
						.append( cookieConfig.hasChildren( "domain" ) ? cookieConfig.getFirstChild( "domain" ).strValue() : "" )
						.append( "; path=" )
						.append( cookieConfig.hasChildren( "path" ) ? cookieConfig.getFirstChild( "path" ).strValue() : "" );
					if ( cookieConfig.hasChildren( "secure" ) && cookieConfig.getFirstChild( "secure" ).intValue() > 0 ) {
						headerBuilder.append( "; secure" );
					}
					headerBuilder.append( CRLF );
				}
			}
		}
	}
private String requestFormat = null;
private void send_appendQuerystring( Value value, String charset, StringBuilder headerBuilder )
throws IOException
{
if ( value.children().isEmpty() == false ) {
headerBuilder.append( '?' );
for( Entry< String, ValueVector > entry : value.children().entrySet() ) {
headerBuilder
.append( entry.getKey() )
.append( '=' )
.append( URLEncoder.encode( entry.getValue().first().strValue(), charset ) )
.append( '&' );
}
}
}
private void send_appendParsedAlias( String alias, Value value, String charset, StringBuilder headerBuilder )
throws IOException
{
int offset = 0;
String currStrValue;
String currKey;
StringBuilder result = new StringBuilder( alias );
Matcher m = Pattern.compile( "%(!)?\\{[^\\}]*\\}" ).matcher( alias );
while( m.find() ) {
if ( m.group( 1 ) == null ) { // We have to use URLEncoder
currKey = alias.substring( m.start() + 2, m.end() - 1 );
if ( "$".equals( currKey ) ) {
currStrValue = URLEncoder.encode( value.strValue(), charset );
} else {
currStrValue = URLEncoder.encode( value.getFirstChild( currKey ).strValue(), charset );
}
} else { // We have to insert the string raw
currKey = alias.substring( m.start() + 3, m.end() - 1 );
if ( "$".equals( currKey ) ) {
currStrValue = value.strValue();
} else {
currStrValue = value.getFirstChild( currKey ).strValue();
}
}
result.replace(
m.start() + offset, m.end() + offset,
currStrValue
);
offset += currStrValue.length() - 3 - currKey.length();
}
headerBuilder.append( result );
}
private String getCharset()
{
String charset = "UTF-8";
if ( hasParameter( "charset" ) ) {
charset = getStringParameter( "charset" );
}
return charset;
}
private String send_getFormat()
{
String format = "xml";
if ( inInputPort && requestFormat != null ) {
format = requestFormat;
requestFormat = null;
} else if ( hasParameter( "format" ) ) {
format = getStringParameter( "format" );
}
return format;
}
	// Holds an encoded message body together with the entity headers that
	// describe it; content == null means the message has no body.
	private static class EncodedContent {
		private ByteArray content = null;
		private String contentType = "";
		private String contentDisposition = "";
	}
	/**
	 * Encodes the body of an outgoing message according to {@code format}
	 * (xml, binary, html, multipart/form-data, x-www-form-urlencoded,
	 * text/x-gwt-rpc, json) and returns it together with its content type.
	 * For outbound GET requests no body is produced at all.
	 *
	 * @throws IOException on XML transformation or GWT serialization failure
	 */
	private EncodedContent send_encodeContent( CommMessage message, Method method, String charset, String format )
		throws IOException
	{
		EncodedContent ret = new EncodedContent();
		if ( inInputPort == false && method == Method.GET ) {
			// We are building a GET request
			return ret;
		}
		if ( "xml".equals( format ) ) {
			// Root element is the operation name, with a "Response" suffix
			// when we are answering on an input port.
			Document doc = docBuilder.newDocument();
			Element root = doc.createElement( message.operationName() + (( inInputPort ) ? "Response" : "") );
			doc.appendChild( root );
			if ( message.isFault() ) {
				Element faultElement = doc.createElement( message.fault().faultName() );
				root.appendChild( faultElement );
				valueToDocument( message.fault().value(), faultElement, doc );
			} else {
				valueToDocument( message.value(), root, doc );
			}
			Source src = new DOMSource( doc );
			ByteArrayOutputStream tmpStream = new ByteArrayOutputStream();
			Result dest = new StreamResult( tmpStream );
			try {
				transformer.transform( src, dest );
			} catch( TransformerException e ) {
				throw new IOException( e );
			}
			ret.content = new ByteArray( tmpStream.toByteArray() );
			ret.contentType = "text/xml";
		} else if ( "binary".equals( format ) ) {
			// Raw bytes pass through unchanged; non-byte-array values are dropped.
			if ( message.value().isByteArray() ) {
				ret.content = (ByteArray) message.value().valueObject();
				ret.contentType = "application/octet-stream";
			}
		} else if ( "html".equals( format ) ) {
			ret.content = new ByteArray( message.value().strValue().getBytes( charset ) );
			ret.contentType = "text/html";
		} else if ( "multipart/form-data".equals( format ) ) {
			ret.contentType = "multipart/form-data; boundary=" + BOUNDARY;
			StringBuilder builder = new StringBuilder();
			// Children whose names start with '@' are treated as metadata and skipped.
			for( Entry< String, ValueVector > entry : message.value().children().entrySet() ) {
				if ( !entry.getKey().startsWith( "@" ) ) {
					builder.append( "--" ).append( BOUNDARY ).append( CRLF );
					builder.append( "Content-Disposition: form-data; name=\"" ).append( entry.getKey() ).append( '\"' ).append( CRLF ).append( CRLF );
					builder.append( entry.getValue().first().strValue() ).append( CRLF );
				}
			}
			builder.append( "--" + BOUNDARY + "--" );
			ret.content = new ByteArray( builder.toString().getBytes( charset ) );
		} else if ( "x-www-form-urlencoded".equals( format ) ) {
			ret.contentType = "application/x-www-form-urlencoded";
			Iterator< Entry< String, ValueVector > > it =
				message.value().children().entrySet().iterator();
			Entry< String, ValueVector > entry;
			StringBuilder builder = new StringBuilder();
			while( it.hasNext() ) {
				entry = it.next();
				builder.append( entry.getKey() )
					.append( "=" )
					.append( URLEncoder.encode( entry.getValue().first().strValue(), "UTF-8" ) );
				if ( it.hasNext() ) {
					builder.append( '&' );
				}
			}
			ret.content = new ByteArray( builder.toString().getBytes( charset ) );
		} else if ( "text/x-gwt-rpc".equals( format ) ) {
			// GWT-RPC encoding: faults and values are converted through JolieGWTConverter.
			ret.contentType = "text/x-gwt-rpc";
			try {
				if ( message.isFault() ) {
					ret.content = new ByteArray(
						RPC.encodeResponseForFailure( JolieService.class.getMethods()[0], JolieGWTConverter.jolieToGwtFault( message.fault() ) ).getBytes( charset )
					);
				} else {
					joliex.gwt.client.Value v = new joliex.gwt.client.Value();
					JolieGWTConverter.jolieToGwtValue( message.value(), v );
					ret.content = new ByteArray(
						RPC.encodeResponseForSuccess( JolieService.class.getMethods()[0], v ).getBytes( charset )
					);
				}
			} catch( SerializationException e ) {
				throw new IOException( e );
			}
		} else if ( "json".equals( format ) ) {
			ret.contentType = "application/json";
			StringBuilder jsonStringBuilder = new StringBuilder();
			JsonUtils.valueToJsonString( message.value(), jsonStringBuilder );
			ret.content = new ByteArray( jsonStringBuilder.toString().getBytes( charset ) );
		}
		// NOTE(review): an unrecognized format leaves ret.content == null,
		// which callers treat as "no body" — confirm this is intended.
		return ret;
	}
private void send_appendResponseHeaders( CommMessage message, StringBuilder headerBuilder )
{
String redirect = getStringParameter( "redirect" );
if ( redirect.isEmpty() ) {
headerBuilder.append( "HTTP/1.1 200 OK" + CRLF );
} else {
headerBuilder.append( "HTTP/1.1 303 See Other" + CRLF );
headerBuilder.append( "Location: " + redirect + CRLF );
}
send_appendSetCookieHeader( message, headerBuilder );
headerBuilder.append( "Server: JOLIE" ).append( CRLF );
StringBuilder cacheControlHeader = new StringBuilder();
if ( hasParameter( "cacheControl" ) ) {
Value cacheControl = getParameterFirstValue( "cacheControl" );
if ( cacheControl.hasChildren( "maxAge" ) ) {
cacheControlHeader.append( "max-age=" ).append( cacheControl.getFirstChild( "maxAge" ).intValue() );
}
}
if ( cacheControlHeader.length() > 0 ) {
headerBuilder.append( "Cache-Control: " ).append( cacheControlHeader ).append( CRLF );
}
}
	// Appends the HTTP method token (e.g. GET, POST) to the request line.
	private void send_appendRequestMethod( Method method, StringBuilder headerBuilder )
	{
		headerBuilder.append( method.id() );
	}
	/**
	 * Appends the request-target for an outgoing request: the port URI path
	 * (prefixed with '/' if missing) followed by either the operation name or
	 * its expanded alias, plus a query string for GET requests.
	 *
	 * @throws IOException if URL-encoding of alias/query values fails
	 */
	private void send_appendRequestPath( CommMessage message, Method method, StringBuilder headerBuilder, String charset )
		throws IOException
	{
		// Ensure the path starts with a single '/'.
		if ( uri.getPath().length() < 1 || uri.getPath().charAt( 0 ) != '/' ) {
			headerBuilder.append( '/' );
		}
		headerBuilder.append( uri.getPath() );
		String alias = getOperationSpecificStringParameter( message.operationName(), Parameters.ALIAS );
		if ( alias.isEmpty() ) {
			headerBuilder.append( message.operationName() );
		} else {
			// Alias templates may embed message values (%{...} placeholders).
			send_appendParsedAlias( alias, message.value(), charset, headerBuilder );
		}
		if ( method == Method.GET ) {
			send_appendQuerystring( message.value(), charset, headerBuilder );
		}
	}
private static void send_appendAuthorizationHeader( CommMessage message, StringBuilder headerBuilder )
{
if ( message.value().hasChildren( jolie.lang.Constants.Predefined.HTTP_BASIC_AUTHENTICATION.token().content() ) ) {
Value v = message.value().getFirstChild( jolie.lang.Constants.Predefined.HTTP_BASIC_AUTHENTICATION.token().content() );
//String realm = v.getFirstChild( "realm" ).strValue();
String userpass =
v.getFirstChild( "userid" ).strValue() + ":" +
v.getFirstChild( "password" ).strValue();
sun.misc.BASE64Encoder encoder = new sun.misc.BASE64Encoder();
userpass = encoder.encode( userpass.getBytes() );
headerBuilder.append( "Authorization: Basic " ).append( userpass ).append( CRLF );
}
}
private Method send_getRequestMethod( CommMessage message )
throws IOException
{
try {
Method method;
if ( hasOperationSpecificParameter( message.operationName(), Parameters.METHOD ) ) {
method = Method.fromString( getOperationSpecificStringParameter( message.operationName(), Parameters.METHOD ).toUpperCase() );
} else if ( hasParameter( Parameters.METHOD ) ) {
method = Method.fromString( getStringParameter( Parameters.METHOD ).toUpperCase() );
} else {
method = Method.POST;
}
return method;
} catch( Method.UnsupportedMethodException e ) {
throw new IOException( e );
}
}
private void send_appendRequestHeaders( CommMessage message, Method method, StringBuilder headerBuilder, String charset )
throws IOException
{
send_appendRequestMethod( method, headerBuilder );
headerBuilder.append( ' ' );
send_appendRequestPath( message, method, headerBuilder, charset );
headerBuilder.append( " HTTP/1.1" + CRLF );
headerBuilder.append( "Host: " + uri.getHost() + CRLF );
send_appendCookies( message, uri.getHost(), headerBuilder );
send_appendAuthorizationHeader( message, headerBuilder );
}
	/**
	 * Appends headers common to requests and responses: Connection handling,
	 * the Jolie message-id header in concurrent mode, and — when a body is
	 * present — Content-Type (optionally overridden by the "contentType"
	 * parameter), charset, Content-Transfer-Encoding, Content-Disposition
	 * and Content-Length.
	 */
	private void send_appendGenericHeaders(
		CommMessage message,
		EncodedContent encodedContent,
		String charset,
		StringBuilder headerBuilder
	)
	{
		String param;
		// Close the connection unless keep-alive is requested and the channel
		// is not already scheduled for closing.
		if ( checkBooleanParameter( "keepAlive" ) == false || channel().toBeClosed() ) {
			channel().setToBeClosed( true );
			headerBuilder.append( "Connection: close" + CRLF );
		}
		if ( checkBooleanParameter( Parameters.CONCURRENT ) ) {
			headerBuilder.append( Headers.JOLIE_MESSAGE_ID ).append( ": " ).append( message.id() ).append( CRLF );
		}
		if ( encodedContent.content != null ) {
			// The "contentType" parameter overrides whatever the encoder chose.
			String contentType = getStringParameter( "contentType" );
			if ( contentType.length() > 0 ) {
				encodedContent.contentType = contentType;
			}
			headerBuilder.append( "Content-Type: " + encodedContent.contentType );
			if ( charset != null ) {
				headerBuilder.append( "; charset=" + charset.toLowerCase() );
			}
			headerBuilder.append( CRLF );
			param = getStringParameter( "contentTransferEncoding" );
			if ( !param.isEmpty() ) {
				headerBuilder.append( "Content-Transfer-Encoding: " + param + CRLF );
			}
			String contentDisposition = getStringParameter( "contentDisposition" );
			if ( contentDisposition.length() > 0 ) {
				encodedContent.contentDisposition = contentDisposition;
				headerBuilder.append( "Content-Disposition: " + encodedContent.contentDisposition + CRLF );
			}
			// +2 accounts for the trailing CRLF written after the body in send().
			headerBuilder.append( "Content-Length: " + (encodedContent.content.size() + 2) + CRLF );
		} else {
			headerBuilder.append( "Content-Length: 0" + CRLF );
		}
	}
private void send_logDebugInfo( CharSequence header, EncodedContent encodedContent )
{
if ( checkBooleanParameter( "debug" ) ) {
StringBuilder debugSB = new StringBuilder();
debugSB.append( "[HTTP debug] Sending:\n" );
debugSB.append( header );
if (
getParameterVector( "debug" ).first().getFirstChild( "showContent" ).intValue() > 0
&& encodedContent.content != null
) {
debugSB.append( encodedContent.content.toString() );
}
Interpreter.getInstance().logInfo( debugSB.toString() );
}
}
	/**
	 * Serializes and writes a message on the wire: chooses method, charset and
	 * format, encodes the body, builds either response or request headers
	 * depending on the port direction, logs debug info, and writes headers
	 * followed (when present) by the body and a trailing CRLF.
	 *
	 * @throws IOException on encoding or stream failure
	 */
	public void send( OutputStream ostream, CommMessage message, InputStream istream )
		throws IOException
	{
		Method method = send_getRequestMethod( message );
		String charset = getCharset();
		String format = send_getFormat();
		EncodedContent encodedContent = send_encodeContent( message, method, charset, format );
		StringBuilder headerBuilder = new StringBuilder();
		if ( inInputPort ) {
			// We're responding to a request
			send_appendResponseHeaders( message, headerBuilder );
		} else {
			// We're sending a notification or a solicit
			send_appendRequestHeaders( message, method, headerBuilder, charset );
		}
		send_appendGenericHeaders( message, encodedContent, charset, headerBuilder );
		headerBuilder.append( CRLF );
		send_logDebugInfo( headerBuilder, encodedContent );
		// Remember the operation so a later response can be matched to it.
		inputId = message.operationName();
		/*if ( charset == null ) {
			charset = "UTF8";
		}*/
		ostream.write( headerBuilder.toString().getBytes( charset ) );
		if ( encodedContent.content != null ) {
			// Content-Length already accounts for this trailing CRLF.
			ostream.write( encodedContent.content.getBytes() );
			ostream.write( CRLF.getBytes( charset ) );
		}
	}
	/**
	 * Parses a non-empty XML message body into {@code value} using the shared
	 * DocumentBuilderFactory; empty bodies are silently ignored.
	 *
	 * @throws IOException wrapping parser-configuration or SAX errors
	 */
	private void parseXML( HttpMessage message, Value value )
		throws IOException
	{
		try {
			if ( message.size() > 0 ) {
				DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
				InputSource src = new InputSource( new ByteArrayInputStream( message.content() ) );
				Document doc = builder.parse( src );
				XmlUtils.documentToValue( doc, value );
			}
		} catch( ParserConfigurationException pce ) {
			throw new IOException( pce );
		} catch( SAXException saxe ) {
			throw new IOException( saxe );
		}
	}
private static void parseJson( HttpMessage message, Value value )
throws IOException
{
JsonUtils.parseJsonIntoValue( new InputStreamReader( new ByteArrayInputStream( message.content() ) ), value );
}
private static void parseForm( HttpMessage message, Value value, String charset )
throws IOException
{
String line = new String( message.content(), "UTF8" );
String[] s, pair;
s = line.split( "&" );
for( int i = 0; i < s.length; i++ ) {
pair = s[i].split( "=", 2 );
value.getChildren( pair[0] ).first().setValue( URLDecoder.decode( pair[1], charset ) );
}
}
	/**
	 * Parses a multipart/form-data body into {@code value}. The parser is kept
	 * in a field so recv_checkForMultiPartHeaders() can later read per-part
	 * properties (e.g. filenames).
	 *
	 * @throws IOException on malformed multipart content
	 */
	private void parseMultiPartFormData( HttpMessage message, Value value )
		throws IOException
	{
		multiPartFormDataParser = new MultiPartFormDataParser( message, value );
		multiPartFormDataParser.parse();
	}
	/**
	 * Decodes a GWT-RPC request body: parameter 0 is the operation name,
	 * parameter 1 the GWT value, which is converted into {@code value}.
	 *
	 * @return the operation name carried by the GWT-RPC request
	 * @throws IOException on stream failure
	 */
	private static String parseGWTRPC( HttpMessage message, Value value )
		throws IOException
	{
		RPCRequest request = RPC.decodeRequest( new String( message.content(), "UTF8" ) );
		String operationName = (String)request.getParameters()[0];
		joliex.gwt.client.Value requestValue = (joliex.gwt.client.Value)request.getParameters()[1];
		JolieGWTConverter.gwtToJolieValue( requestValue, value );
		return operationName;
	}
	/**
	 * Maps Set-Cookie headers of a received response into {@code value}
	 * according to the cookies protocol parameter: for each configured cookie
	 * name, the cookie's value is stored (cast per the optional "type" child,
	 * default "string") into the message child named by the config's string
	 * value.
	 *
	 * @throws IOException if a cookie value cannot be cast to the configured type
	 */
	private void recv_checkForSetCookie( HttpMessage message, Value value )
		throws IOException
	{
		if ( hasParameter( Parameters.COOKIES ) ) {
			String type;
			Value cookies = getParameterFirstValue( Parameters.COOKIES );
			Value cookieConfig;
			Value v;
			for( HttpMessage.Cookie cookie : message.setCookies() ) {
				if ( cookies.hasChildren( cookie.name() ) ) {
					cookieConfig = cookies.getFirstChild( cookie.name() );
					if ( cookieConfig.isString() ) {
						// The config's string value names the target message child.
						v = value.getFirstChild( cookieConfig.strValue() );
						if ( cookieConfig.hasChildren( "type" ) ) {
							type = cookieConfig.getFirstChild( "type" ).strValue();
						} else {
							type = "string";
						}
						recv_assignCookieValue( cookie.value(), v, type );
					}
				}
				/*currValue = Value.create();
				currValue.getNewChild( "expires" ).setValue( cookie.expirationDate() );
				currValue.getNewChild( "path" ).setValue( cookie.path() );
				currValue.getNewChild( "name" ).setValue( cookie.name() );
				currValue.getNewChild( "value" ).setValue( cookie.value() );
				currValue.getNewChild( "domain" ).setValue( cookie.domain() );
				currValue.getNewChild( "secure" ).setValue( (cookie.secure() ? 1 : 0) );
				cookieVec.add( currValue );*/
			}
		}
	}
private void recv_assignCookieValue( String cookieValue, Value value, String typeKeyword )
throws IOException
{
NativeType type = NativeType.fromString( typeKeyword );
if ( NativeType.INT == type ) {
try {
value.setValue( new Integer( cookieValue ) );
} catch( NumberFormatException e ) {
throw new IOException( e );
}
} else if ( NativeType.LONG == type ) {
try {
value.setValue( new Long( cookieValue ) );
} catch( NumberFormatException e ) {
throw new IOException( e );
}
} else if ( NativeType.STRING == type ) {
value.setValue( cookieValue );
} else if ( NativeType.DOUBLE == type ) {
try {
value.setValue( new Double( cookieValue ) );
} catch( NumberFormatException e ) {
throw new IOException( e );
}
} else if ( NativeType.BOOL == type ) {
value.setValue( Boolean.valueOf( cookieValue ) );
} else {
value.setValue( cookieValue );
}
}
	/**
	 * Maps request cookies into the decoded message according to the cookies
	 * configuration (operation-specific first, then port-level): each
	 * configured cookie's value is cast per the optional "type" child and
	 * stored into the message child named by the config's string value.
	 *
	 * @throws IOException if a cookie value cannot be cast to the configured type
	 */
	private void recv_checkForCookies( HttpMessage message, DecodedMessage decodedMessage )
		throws IOException
	{
		Value cookies = null;
		if ( hasOperationSpecificParameter( decodedMessage.operationName, Parameters.COOKIES ) ) {
			cookies = getOperationSpecificParameterFirstValue( decodedMessage.operationName, Parameters.COOKIES );
		} else if ( hasParameter( Parameters.COOKIES ) ) {
			cookies = getParameterFirstValue( Parameters.COOKIES );
		}
		if ( cookies != null ) {
			Value v;
			String type;
			for( Entry< String, String > entry : message.cookies().entrySet() ) {
				if ( cookies.hasChildren( entry.getKey() ) ) {
					Value cookieConfig = cookies.getFirstChild( entry.getKey() );
					if ( cookieConfig.isString() ) {
						// The config's string value names the target message child.
						v = decodedMessage.value.getFirstChild( cookieConfig.strValue() );
						if ( cookieConfig.hasChildren( "type" ) ) {
							type = cookieConfig.getFirstChild( "type" ).strValue();
						} else {
							type = "string";
						}
						recv_assignCookieValue( entry.getValue(), v, type );
					}
				}
			}
		}
	}
private static void recv_parseQueryString( HttpMessage message, Value value )
{
String queryString = message.requestPath() == null ? "" : message.requestPath();
String[] kv = queryString.split( "\\?" );
if ( kv.length > 1 ) {
queryString = kv[1];
String[] params = queryString.split( "&" );
for( String param : params ) {
kv = param.split( "=", 2 );
if ( kv.length > 1 ) {
value.getFirstChild( kv[0] ).setValue( kv[1] );
}
}
}
}
/*
* Prints debug information about a received message
*/
private void recv_logDebugInfo( HttpMessage message )
{
StringBuilder debugSB = new StringBuilder();
debugSB.append( "[HTTP debug] Receiving:\n" );
debugSB.append( "HTTP Code: " + message.httpCode() + "\n" );
debugSB.append( "Resource: " + message.requestPath() + "\n" );
debugSB.append( "--> Header properties\n" );
for( Entry< String, String > entry : message.properties() ) {
debugSB.append( '\t' + entry.getKey() + ": " + entry.getValue() + '\n' );
}
for( HttpMessage.Cookie cookie : message.setCookies() ) {
debugSB.append( "\tset-cookie: " + cookie.toString() + '\n' );
}
for( Entry< String, String > entry : message.cookies().entrySet() ) {
debugSB.append( "\tcookie: " + entry.getKey() + '=' + entry.getValue() + '\n' );
}
if (
getParameterFirstValue( "debug" ).getFirstChild( "showContent" ).intValue() > 0
&& message.content() != null
) {
debugSB.append( "--> Message content\n" );
debugSB.append( new String( message.content() ) );
}
Interpreter.getInstance().logInfo( debugSB.toString() );
}
private void recv_parseMessage( HttpMessage message, DecodedMessage decodedMessage, String charset )
throws IOException
{
requestFormat = null;
String format = "xml";
if ( hasParameter( "format" ) ) {
format = getStringParameter( "format" );
}
String type = message.getProperty( "content-type" ).split( ";" )[0];
if ( "text/html".equals( type ) ) {
decodedMessage.value.setValue( new String( message.content() ) );
} else if ( "application/x-www-form-urlencoded".equals( type ) ) {
parseForm( message, decodedMessage.value, charset );
} else if ( "text/xml".equals( type ) ) {
parseXML( message, decodedMessage.value );
} else if ( "text/x-gwt-rpc".equals( type ) ) {
decodedMessage.operationName = parseGWTRPC( message, decodedMessage.value );
requestFormat = "text/x-gwt-rpc";
} else if ( "multipart/form-data".equals( type ) ) {
parseMultiPartFormData( message, decodedMessage.value );
} else if ( "application/octet-stream".equals( type ) || type.startsWith( "image/" )) {
decodedMessage.value.setValue( new ByteArray( message.content() ) );
} else if ( "application/json".equals( type ) ) {
parseJson( message, decodedMessage.value );
} else if ( "xml".equals( format ) || "rest".equals( format ) ) {
parseXML( message, decodedMessage.value );
} else if ( "json".equals( format ) ) {
parseJson( message, decodedMessage.value );
} else {
decodedMessage.value.setValue( new String( message.content() ) );
}
}
	/**
	 * Determines the target operation of a received request. When the body
	 * did not already set it (e.g. GWT-RPC), the operation name is derived
	 * from the request path, splitting off an optional resource path between
	 * separator matches. If the resulting operation cannot be handled by the
	 * input port and a "default" operation is configured, the request is
	 * wrapped (data, operation, cookies) and redirected to it.
	 */
	private void recv_checkReceivingOperation( HttpMessage message, DecodedMessage decodedMessage )
	{
		if ( decodedMessage.operationName == null ) {
			// Strip the query string before parsing the path.
			String requestPath = message.requestPath().split( "\\?" )[0];
			decodedMessage.operationName = requestPath;
			Matcher m = LocationParser.RESOURCE_SEPARATOR_PATTERN.matcher( decodedMessage.operationName );
			if ( m.find() ) {
				int resourceStart = m.end();
				if ( m.find() ) {
					// Two separators: the part in between is the resource path,
					// the remainder is the operation name.
					decodedMessage.resourcePath = requestPath.substring( resourceStart - 1, m.start() );
					decodedMessage.operationName = requestPath.substring( m.end(), requestPath.length() );
				}
			}
		}
		if ( decodedMessage.resourcePath.equals( "/" ) && !channel().parentInputPort().canHandleInputOperation( decodedMessage.operationName ) ) {
			String defaultOpId = getStringParameter( "default" );
			if ( defaultOpId.length() > 0 ) {
				// Re-route to the default operation, packing the original
				// request into data/operation/cookies children.
				Value body = decodedMessage.value;
				decodedMessage.value = Value.create();
				decodedMessage.value.getChildren( "data" ).add( body );
				decodedMessage.value.getFirstChild( "operation" ).setValue( decodedMessage.operationName );
				Value cookies = decodedMessage.value.getFirstChild( "cookies" );
				for( Entry< String, String > cookie : message.cookies().entrySet() ) {
					cookies.getFirstChild( cookie.getKey() ).setValue( cookie.getValue() );
				}
				decodedMessage.operationName = defaultOpId;
			}
		}
	}
private void recv_checkForMultiPartHeaders( DecodedMessage decodedMessage )
{
if ( multiPartFormDataParser != null ) {
String target;
for( Entry< String, MultiPartFormDataParser.PartProperties > entry : multiPartFormDataParser.getPartPropertiesSet() ) {
if ( entry.getValue().filename() != null ) {
target = getMultipartHeaderForPart( decodedMessage.operationName, entry.getKey() );
if ( target != null ) {
decodedMessage.value.getFirstChild( target ).setValue( entry.getValue().filename() );
}
}
}
multiPartFormDataParser = null;
}
}
private void recv_checkForMessageProperties( HttpMessage message, DecodedMessage decodedMessage )
throws IOException
{
recv_checkForCookies( message, decodedMessage );
recv_checkForMultiPartHeaders( decodedMessage );// message, decodedMessage );
String property;
if (
(property=message.getProperty( "user-agent" )) != null &&
hasParameter( "userAgent" )
) {
getParameterFirstValue( "userAgent" ).setValue( property );
}
}
	// Mutable holder for the pieces of an incoming message while it is being
	// decoded: operation name (null until resolved), payload value, resource
	// path (defaults to "/") and the correlation id (generic until a
	// Jolie message-id header is seen).
	private static class DecodedMessage {
		private String operationName = null;
		private Value value = Value.create();
		private String resourcePath = "/";
		private long id = CommMessage.GENERIC_ID;
	}
public CommMessage recv( InputStream istream, OutputStream ostream )
throws IOException
{
CommMessage retVal = null;
DecodedMessage decodedMessage = new DecodedMessage();
HttpMessage message = new HttpParser( istream ).parse();
if ( message.isSupported() == false ) {
ostream.write( NOT_IMPLEMENTED_HEADER );
ostream.write( CRLF.getBytes() );
ostream.write( CRLF.getBytes() );
ostream.flush();
return null;
}
String charset = getCharset();
if ( message.getProperty( "connection" ) != null ) {
HttpUtils.recv_checkForChannelClosing( message, channel() );
} else if ( hasParameter( "keepAlive" ) ) {
channel().setToBeClosed( checkBooleanParameter( "keepAlive" ) == false );
}
if ( checkBooleanParameter( Parameters.DEBUG ) ) {
recv_logDebugInfo( message );
}
if ( message.size() > 0 ) {
recv_parseMessage( message, decodedMessage, charset );
}
if ( checkBooleanParameter( Parameters.CONCURRENT ) ) {
String messageId = message.getProperty( Headers.JOLIE_MESSAGE_ID );
if ( messageId != null ) {
try {
decodedMessage.id = Long.parseLong( messageId );
} catch( NumberFormatException e ) {}
}
}
if ( message.isResponse() ) {
recv_checkForSetCookie( message, decodedMessage.value );
retVal = new CommMessage( decodedMessage.id, inputId, decodedMessage.resourcePath, decodedMessage.value, null );
} else if ( message.isError() == false ) {
if ( message.isGet() ) {
recv_parseQueryString( message, decodedMessage.value );
}
recv_checkReceivingOperation( message, decodedMessage );
recv_checkForMessageProperties( message, decodedMessage );
retVal = new CommMessage( decodedMessage.id, decodedMessage.operationName, decodedMessage.resourcePath, decodedMessage.value, null );
}
if ( "/".equals( retVal.resourcePath() ) && channel().parentPort() != null
&& channel().parentPort().getInterface().containsOperation( retVal.operationName() ) ) {
try {
// The message is for this service
Interface iface = channel().parentPort().getInterface();
OneWayTypeDescription oneWayTypeDescription = iface.oneWayOperations().get( retVal.operationName() );
if ( oneWayTypeDescription != null && message.isResponse() == false ) {
// We are receiving a One-Way message
oneWayTypeDescription.requestType().cast( retVal.value() );
} else {
RequestResponseTypeDescription rrTypeDescription = iface.requestResponseOperations().get( retVal.operationName() );
if ( retVal.isFault() ) {
Type faultType = rrTypeDescription.faults().get( retVal.fault().faultName() );
if ( faultType != null ) {
faultType.cast( retVal.value() );
}
} else {
if ( message.isResponse() ) {
rrTypeDescription.responseType().cast( retVal.value() );
} else {
rrTypeDescription.requestType().cast( retVal.value() );
}
}
}
} catch( TypeCastingException e ) {
// TODO: do something here?
}
}
return retVal;
}
}
|
package swift.indigo;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.logging.Logger;
import swift.clocks.Timestamp;
import swift.indigo.proto.AcquireResourcesReply;
import swift.indigo.proto.AcquireResourcesReply.AcquireReply;
import swift.indigo.proto.AcquireResourcesRequest;
import swift.indigo.proto.ReleaseResourcesRequest;
import swift.indigo.proto.TransferResourcesRequest;
import sys.net.api.Endpoint;
import sys.net.api.Envelope;
import sys.net.api.Service;
public class ResourceManagerNode implements ReservationsProtocolHandler {
	// Pause (ms) between polls when a worker thread finds its queue empty.
	protected static final long DEFAULT_QUEUE_PROCESSING_WAIT_TIME = 50;
	// Ratio passed to SimpleMessageBalacing between incoming-request and
	// transfer-request processing — exact semantics defined by that class.
	private static final int DEFAULT_REQUEST_TRANSFER_RATIO = 3;
	private static Logger logger = Logger.getLogger(ResourceManagerNode.class.getName());
	private IndigoResourceManager manager;
	// Incoming requests
	private Queue<IndigoOperation> incomingRequestsQueue;
	// Outgoing requests
	private transient PriorityQueue<TransferResourcesRequest> transferRequestsQueue;
	// Set to false to stop the two worker threads started by the constructor.
	private boolean active;
	private Service stub;
	// Replies to acquire requests, keyed by client timestamp; used to answer
	// retransmissions and to find state on release.
	private Map<Timestamp, AcquireResourcesReply> replies = new ConcurrentHashMap<Timestamp, AcquireResourcesReply>();
	private IndigoSequencerAndResourceManager sequencer;
	// Shared monitor object for all queue/index accesses.
	private ResourceManagerNode thisManager = this;
	// Operations currently enqueued or in flight; used by isDuplicate().
	private Set<IndigoOperation> waitingIndex;
	// Transfer requests already satisfied, keyed by client timestamp.
	private Map<Timestamp, IndigoOperation> alreadyProcessedTransfers;
	/**
	 * Builds the manager node and starts two worker threads: one drains the
	 * incoming request/transfer queues (ordering decided by
	 * SimpleMessageBalacing) and delivers each operation to this node; the
	 * other sends queued outgoing transfer requests to their destination
	 * endpoints. Both threads poll and back off by
	 * DEFAULT_QUEUE_PROCESSING_WAIT_TIME when idle, and run while
	 * {@code active} is true.
	 */
	public ResourceManagerNode(IndigoSequencerAndResourceManager sequencer, Endpoint surrogate,
			final Map<String, Endpoint> endpoints) {
		// Outgoing transfers queue
		Queue<TransferResourcesRequest> outgoingMessages = new LinkedList<>();
		// Incoming transfer requests are ordered by priority: promotes
		// exclusive_lock operations
		// and messages with smaller requests (size of requests was not tested)
		this.transferRequestsQueue = new PriorityQueue<TransferResourcesRequest>();
		// Incoming messages are ordered by FIFO order
		this.incomingRequestsQueue = new LinkedList<IndigoOperation>();
		this.waitingIndex = new HashSet<IndigoOperation>();
		this.alreadyProcessedTransfers = new HashMap<Timestamp, IndigoOperation>();
		this.manager = new IndigoResourceManager(sequencer, surrogate, endpoints, outgoingMessages);
		this.stub = sequencer.stub;
		this.sequencer = sequencer;
		this.active = true;
		final SimpleMessageBalacing messageBalancing = new SimpleMessageBalacing(DEFAULT_REQUEST_TRANSFER_RATIO,
				incomingRequestsQueue, transferRequestsQueue);
		// Incoming requests processor thread
		new Thread(new Runnable() {
			@Override
			public void run() {
				while (active) {
					IndigoOperation request;
					synchronized (thisManager) {
						request = messageBalancing.nextOp();
					}
					if (request != null) {
						// deliverTo dispatches to the matching process() overload.
						request.deliverTo(thisManager);
					} else {
						try {
							Thread.sleep(DEFAULT_QUEUE_PROCESSING_WAIT_TIME);
						} catch (InterruptedException e) {
							e.printStackTrace();
						}
					}
				}
			}
		}).start();
		// Transfer requests thread
		new Thread(new Runnable() {
			@Override
			public void run() {
				while (active) {
					// NOTE(review): this size() pre-check runs outside the lock;
					// the locked re-check below makes the dequeue itself safe.
					if (outgoingMessages.size() > 0) {
						TransferResourcesRequest request = null;
						synchronized (thisManager) {
							if (outgoingMessages.size() > 0) {
								request = outgoingMessages.remove();
							} else
								continue;
						}
						Endpoint endpoint = endpoints.get(request.getDestination());
						logger.info("Asking resources: " + request);
						stub.send(endpoint, request);
					} else {
						try {
							Thread.sleep(DEFAULT_QUEUE_PROCESSING_WAIT_TIME);
						} catch (InterruptedException e) {
							e.printStackTrace();
						}
					}
				}
			}
		}).start();
	}
	/**
	 * Processes a dequeued transfer request under the shared monitor. When the
	 * manager reports the transfer happened, the request is recorded in
	 * alreadyProcessedTransfers (so retransmissions are ignored) and removed
	 * from the duplicate-suppression index.
	 */
	public void process(TransferResourcesRequest request) {
		synchronized (thisManager) {
			if (logger.isLoggable(Level.INFO))
				logger.info("SITE: " + sequencer.siteId + " Processing TransferResourcesRequest: " + request);
			TRANSFER_STATUS reply = manager.transferResources(request);
			if (reply.hasTransferred()) {
				alreadyProcessedTransfers.put(request.getClientTs(), request);
				waitingIndex.remove(request);
			}
			if (logger.isLoggable(Level.INFO)) {
				logger.info("SITE: " + sequencer.siteId + " Finished TransferResourcesRequest: " + request + " Reply: "
						+ reply);
			}
		}
	}
	/**
	 * Processes a dequeued release request under the shared monitor. The
	 * matching acquire reply (looked up by client timestamp) is marked
	 * released exactly once; resources are returned to the manager only if
	 * they had actually been acquired.
	 */
	public void process(ReleaseResourcesRequest request) {
		synchronized (thisManager) {
			if (logger.isLoggable(Level.INFO))
				logger.info("SITE: " + sequencer.siteId + " Processing ReleaseResourcesRequest " + request);
			Timestamp ts = request.getClientTs();
			AcquireResourcesReply arr = replies.get(ts);
			if (arr != null && !arr.isReleased()) {
				// replies.remove(ts);
				arr.setReleased();
				waitingIndex.remove(request);
				if (arr.acquiredResources()) {
					manager.releaseResources(arr);
				} else {
					logger.warning("SITE: " + sequencer.siteId + " Trying to release but did not get resources "
							+ request);
				}
			}
			if (logger.isLoggable(Level.INFO))
				logger.info("SITE: " + sequencer.siteId + " Finished ReleaseResourcesRequest" + request);
		}
	}
public void processWithReply(Envelope conn, AcquireResourcesRequest request) {
AcquireResourcesReply reply = null;
synchronized (thisManager) {
if (logger.isLoggable(Level.INFO))
logger.info("SITE: " + sequencer.siteId + " Processing AcquireResourcesRequest " + request);
// if (request.getRequests().size() > 0) {
reply = manager.acquireResources(request);
if (reply.acquiredStatus().equals(AcquireReply.YES)) {
replies.put(request.getClientTs(), reply);
}
// } else {
// reply = new AcquireResourcesReply(AcquireReply.NO_RESOURCES,
// sequencer.clocks.currentClockCopy());
if (logger.isLoggable(Level.INFO))
logger.info("SITE: " + sequencer.siteId + " Finished AcquireResourcesRequest " + request + " Reply: "
+ reply);
waitingIndex.remove(request.getClientTs());
}
conn.reply(reply);
}
	/**
	 * Message handlers
	 */
	// Handles an incoming acquire request: empty requests are answered
	// immediately with NO_RESOURCES; duplicates (already enqueued, or already
	// processed) are answered REPEATED; everything else is enqueued for the
	// processor thread, which replies later via processWithReply().
	@Override
	public void onReceive(Envelope conn, AcquireResourcesRequest request) {
		request.setHandler(conn);
		AcquireResourcesReply reply = null;
		if (request.getRequests().size() == 0) {
			reply = new AcquireResourcesReply(AcquireReply.NO_RESOURCES, sequencer.clocks.currentClockCopy());
		} else {
			synchronized (thisManager) {
				if (isDuplicate(request)) {
					if (logger.isLoggable(Level.INFO))
						logger.info(sequencer.siteId + " Message is already enqueued: " + request);
					reply = new AcquireResourcesReply(AcquireReply.REPEATED, sequencer.clocks.currentClockCopy());
				} else if (checkAcquireAlreadyProcessed(request) != null) {
					if (logger.isLoggable(Level.INFO))
						logger.info(sequencer.siteId + " Received an already processed message: " + request
								+ " REPLY: " + replies.get(request.getClientTs()));
					reply = new AcquireResourcesReply(AcquireReply.REPEATED, sequencer.clocks.currentClockCopy());
				} else {
					incomingRequestsQueue.add(request);
				}
			}
		}
		// reply is null when the request was enqueued: the reply comes later.
		if (reply != null)
			conn.reply(reply);
	}
// Enqueues an incoming transfer request unless a transfer for the same
// client timestamp was already satisfied, or an identical message is
// already waiting to be processed.
@Override
public void onReceive(Envelope conn, TransferResourcesRequest request) {
    synchronized (thisManager) {
        boolean alreadySatisfied = alreadyProcessedTransfers.containsKey(request.getClientTs());
        if (!alreadySatisfied && !isDuplicate(request)) {
            transferRequestsQueue.add(request);
        }
    }
}
// Queues a release request for processing unless the matching acquire
// has already been released, or the message is a duplicate.
@Override
public void onReceive(Envelope conn, ReleaseResourcesRequest request) {
    synchronized (thisManager) {
        // BUG FIX: the replies map is keyed by the client timestamp (see
        // processWithReply / process), so looking it up with the request
        // object itself always returned null and already-released
        // requests were re-queued.
        AcquireResourcesReply reply = replies.get(request.getClientTs());
        if (reply == null || !reply.isReleased()) {
            if (!isDuplicate(request)) {
                incomingRequestsQueue.add(request);
            }
        }
    }
}
// This endpoint is not expected to receive acquire replies; warn and drop.
@Override
public void onReceive(Envelope conn, AcquireResourcesReply request) {
    logger.warning("RPC " + request.getClass() + " not implemented!");
}
/**
* Private methods
*/
// If a message is already enqueued for processing, ignore the new request.
// waitingIndex.add returns false when an equal operation is already
// present, which is exactly the "duplicate" condition.
private boolean isDuplicate(IndigoOperation request) {
    return !waitingIndex.add(request);
}
// Returns the cached reply for an acquire request that was already
// processed, or null when the request has not been answered yet.
// (Simplified: the original double null-check collapsed to one return.)
private AcquireResourcesReply checkAcquireAlreadyProcessed(AcquireResourcesRequest request) {
    AcquireResourcesReply reply = replies.get(request.getClientTs());
    if (reply != null && logger.isLoggable(Level.INFO))
        logger.info("SITE: " + sequencer.siteId + " Reply from cache: " + reply);
    return reply;
}
}
/**
 * A queue of queues: elements are drained from the first non-empty
 * sub-queue in priority order, FIFO within each sub-queue.
 */
class FIFOClassQueue<T> {

    Queue<Queue<T>> orderedByPriority;

    public FIFOClassQueue(Queue<Queue<T>> orderedByPriority) {
        this.orderedByPriority = orderedByPriority;
    }

    /** Removes and returns the next element, or null if all sub-queues are empty. */
    public synchronized T nextElement() {
        T next = null;
        for (Queue<T> candidate : orderedByPriority) {
            if (!candidate.isEmpty()) {
                next = candidate.remove();
                break;
            }
        }
        return next;
    }
}
/**
 * Balances dequeuing between plain requests and transfer requests so
 * that transfers are not starved: per nextOp(), requests are preferred
 * only while their count does not exceed the transfer count by more
 * than {@code ratio} (or when no transfer is waiting).
 * (The class name keeps its original spelling for compatibility.)
 */
class SimpleMessageBalacing {

    // Kind of operation being accounted for.
    enum OPType {
        TRANSFER, REQUEST
    };

    private AtomicInteger transfers; // transfers served since last reset
    private AtomicInteger requests;  // requests served since last reset
    private final int ratio;         // allowed request surplus before a transfer is forced
    private Queue<IndigoOperation> requestQueue;
    private Queue<TransferResourcesRequest> transferQueue;

    public SimpleMessageBalacing(int requestTransferRatio, Queue<IndigoOperation> requestQueue,
            Queue<TransferResourcesRequest> transferQueue) {
        this.ratio = requestTransferRatio;
        this.requestQueue = requestQueue;
        this.transferQueue = transferQueue;
        this.requests = new AtomicInteger();
        this.transfers = new AtomicInteger();
    }

    // Updates the per-type counters after an operation is dequeued.
    private void registerOp(OPType op) {
        int count;
        switch (op) {
            case TRANSFER :
                // NOTE(review): the transfer counter is reset on every
                // transfer (the incremented value is discarded), unlike
                // the REQUEST branch which resets only when the ratio is
                // reached — confirm this asymmetry is intentional.
                count = transfers.incrementAndGet();
                transfers.set(0);
                break;
            case REQUEST :
                count = requests.incrementAndGet();
                if (count == ratio)
                    requests.set(0);
                break;
        }
    }

    // Picks the next operation to process, or null when both queues are
    // empty. Requests win while within the ratio or when no transfer is
    // queued; otherwise the next transfer is served.
    public synchronized IndigoOperation nextOp() {
        int nRequests = requests.get();
        int nTransfers = transfers.get();
        if (requestQueue.size() > 0 && (nRequests - nTransfers <= ratio || transferQueue.size() == 0)) {
            registerOp(OPType.REQUEST);
            return requestQueue.remove();
        } else if (transferQueue.size() > 0) {
            registerOp(OPType.TRANSFER);
            return transferQueue.remove();
        } else
            return null;
    }
}
|
package properties.papers;
import static structure.impl.other.Quantification.FORALL;
import properties.Property;
import properties.PropertyMaker;
import structure.intf.Assignment;
import structure.intf.Guard;
import structure.intf.QEA;
import creation.QEABuilder;
/**
 * QEA (Quantified Event Automaton) property definitions for the DaCapo
 * case study. Each static factory builds one monitored property with
 * QEABuilder and registers human-readable event names on the result.
 */
public class DaCapo implements PropertyMaker {

    @Override
    public QEA make(Property property) {
        // NOTE(review): none of the properties below is wired into the
        // generic PropertyMaker interface; callers use the static factory
        // methods directly. Confirm returning null here is intended.
        return null;
    }

    // UnsafeIter: for every collection c and iterator i obtained from it,
    // once c is updated the iterator i must not be used again.
    public static QEA makeUnsafeIter() {
        QEABuilder b = new QEABuilder("unsafe_iterator");
        // Quantified variables (negative indices by builder convention).
        int c = -1;
        int i = -2;
        // Event ids.
        int ITERATOR = 1;
        int USE = 2;
        int UPDATE = 3;
        b.addQuantification(FORALL, c);
        b.addQuantification(FORALL, i);
        // 1 --iterator(c,i)--> 2 --update(c)--> 3 --use(i)--> 4
        b.addTransition(1, ITERATOR, new int[]{c, i}, 2);
        b.addTransition(2, UPDATE, new int[]{c}, 3);
        b.addTransition(3, USE, new int[]{i}, 4);
        // States 1-3 skip unrelated events and are accepting; state 4
        // (use after update) is the only non-final, i.e. violating, state.
        b.setSkipStates(1, 2, 3);
        b.addFinalStates(1, 2, 3);
        QEA qea = b.make();
        qea.record_event_name("iterator", ITERATOR);
        qea.record_event_name("use", USE);
        qea.record_event_name("update", UPDATE);
        return qea;
    }

    // UnsafeMapIter: like UnsafeIter but the collection c is a view
    // created from a map m; updating the map invalidates the iterator.
    public static QEA makeUnsafeMapIter() {
        QEABuilder b = new QEABuilder("unsafe_map_iterator");
        // Quantified variables: map, collection view, iterator.
        int m = -1;
        int c = -2;
        int i = -3;
        // Event ids.
        int CREATE = 1;
        int ITERATOR = 2;
        int USE = 3;
        int UPDATE = 4;
        b.addQuantification(FORALL, m);
        b.addQuantification(FORALL, c);
        b.addQuantification(FORALL, i);
        // 1 --create(m,c)--> 2 --iterator(c,i)--> 3 --update(m)--> 4 --use(i)--> 5
        b.addTransition(1, CREATE, new int[]{m, c}, 2);
        b.addTransition(2, ITERATOR, new int[]{c, i}, 3);
        b.addTransition(3, UPDATE, new int[]{m}, 4);
        b.addTransition(4, USE, new int[]{i}, 5);
        // State 5 (use after the backing map changed) is the violation.
        b.setSkipStates(1, 2, 3, 4);
        b.addFinalStates(1, 2, 3, 4);
        QEA qea = b.make();
        qea.record_event_name("create", CREATE);
        qea.record_event_name("iterator", ITERATOR);
        qea.record_event_name("use", USE);
        qea.record_event_name("update", UPDATE);
        return qea;
    }

    // SafeIterator: an iterator created over `count` elements may call
    // next() at most `count` times; each next() decrements the counter
    // and is guarded by count > 0.
    public static QEA makeSafeIterator() {
        QEABuilder b = new QEABuilder("safe_iterator");
        int i = -1;    // quantified: the iterator
        int count = 1; // free variable: remaining elements
        int ITERATOR = 1;
        int NEXT = 2;
        b.addQuantification(FORALL, i);
        b.addTransition(1, ITERATOR, new int[]{i, count}, 2);
        // next(i) only allowed while count > 0; consumes one element.
        b.startTransition(2);
        b.eventName(NEXT);
        b.addVarArg(i);
        b.addGuard(Guard.isGreaterThanConstant(count, 0));
        b.addAssignment(Assignment.decrement(count));
        b.endTransition(2);
        b.addFinalStates(1, 2);
        QEA qea = b.make();
        qea.record_event_name("iterator", ITERATOR);
        qea.record_event_name("next", NEXT);
        return qea;
    }

    // LockOrdering: for every pair of locks l1 < l2 (identity order),
    // nested acquisition must always happen in a consistent order;
    // state 5 marks the violating interleaving (potential deadlock).
    public static QEA makeLockOrdering() {
        QEABuilder b = new QEABuilder("lock_ordering");
        int l1 = -1;
        int l2 = -2;
        int LOCK = 1;
        int UNLOCK = 2;
        b.addQuantification(FORALL, l1);
        b.addQuantification(FORALL, l2, Guard.isIdentityLessThan(l1, l2));
        // As we use next states we'll be symmetric
        // which is why we have the < global guard
        // l1 goes first
        b.addTransition(1, LOCK, l1, 2);
        b.addTransition(2, UNLOCK, l1, 1);
        b.addTransition(2, LOCK, l2, 3);
        b.addTransition(3, UNLOCK, l2, 3);
        b.addTransition(3, LOCK, l1, 3);
        b.addTransition(3, UNLOCK, l1, 3);
        b.addTransition(3, LOCK, l2, 4);
        b.addTransition(4, UNLOCK, l2, 3);
        b.addTransition(4, LOCK, l1, 5); // State 5 is failure
        //l2 goes first
        b.addTransition(1, LOCK, l2, 6);
        b.addTransition(6, UNLOCK, l2, 1);
        b.addTransition(6, LOCK, l1, 7);
        b.addTransition(7, UNLOCK, l1, 7);
        b.addTransition(7, LOCK, l2, 7);
        b.addTransition(7, UNLOCK, l2, 7);
        b.addTransition(7, LOCK, l1, 8);
        b.addTransition(8, UNLOCK, l1, 7);
        b.addTransition(8, LOCK, l2, 5); // State 5 is failure
        b.addFinalStates(1, 2, 3, 4, 6, 7, 8);
        QEA qea = b.make();
        qea.record_event_name("lock", LOCK);
        qea.record_event_name("unlock", UNLOCK);
        return qea;
    }

    /*
     * I've had a go at making this a Single property
     * based on the realisation that we just need to know
     * that an object is inside a collection, not which
     * collection it is inside
     */
    // PersistentHashes: for every object o, its hash code observed on
    // add must stay the same while o is inside any collection. The free
    // variable count_inside tracks how many collections currently
    // contain o; state 1 means "outside", state 2 means "inside".
    public static QEA makePersistentHashes() {
        QEABuilder b = new QEABuilder("persistent_hashes");
        int o = -1;            // quantified: the object
        int hash = 1;          // free: hash recorded at first successful add
        int count_inside = 2;  // free: number of containing collections
        int success = 3;       // free: whether the add/remove succeeded
        int hash_new = 4;      // free: hash seen on the current event
        int ADD = 1;
        int OBSERVE = 2;
        int REMOVE = 3;
        b.addQuantification(FORALL, o);
        // 1 -> 2: first successful add records the hash, count = 1.
        b.startTransition(1);
        b.eventName(ADD);
        b.addVarArg(o);
        b.addVarArg(hash);
        b.addVarArg(success);//should only take if add is suc
        b.addGuard(Guard.isTrue(success));
        b.addAssignment(Assignment.set(count_inside, 1));
        b.endTransition(2);
        // 2 -> 2: further successful adds with the same hash increment.
        b.startTransition(2);
        b.eventName(ADD);
        b.addVarArg(o);
        b.addVarArg(hash_new);
        b.addVarArg(success); //should only increment if add is suc
        b.addGuard(Guard.and(Guard.isTrue(success),
                Guard.isEqualSem(hash, hash_new)));
        b.addAssignment(Assignment.increment(count_inside));
        b.endTransition(2);
        // 2 -> 2: successful remove while count > 1 decrements.
        b.startTransition(2);
        b.eventName(REMOVE);
        b.addVarArg(o);
        b.addVarArg(hash_new);
        b.addVarArg(success); // should only decrement if the remove is suc
        b.addGuard(Guard.and(Guard.and(
                Guard.isTrue(success),
                Guard.isGreaterThanConstant(count_inside, 1)
                ), Guard.isEqualSem(hash, hash_new)));
        b.addAssignment(Assignment.decrement(count_inside));
        b.endTransition(2);
        // 2 -> 1: successful remove of the last containment.
        b.startTransition(2);
        b.eventName(REMOVE);
        b.addVarArg(o);
        b.addVarArg(hash_new);
        b.addVarArg(success); // should only decrement if the remove is suc
        b.addGuard(Guard.and(Guard.and(
                Guard.isTrue(success),
                Guard.isSemEqualToConstant(count_inside, 1)
                ), Guard.isEqualSem(hash, hash_new)));
        b.addAssignment(Assignment.decrement(count_inside));
        b.endTransition(1);
        // 2 -> 2: observations are only allowed with the recorded hash.
        b.startTransition(2);
        b.eventName(OBSERVE);
        b.addVarArg(o);
        b.addVarArg(hash_new);
        b.addGuard(Guard.isEqualSem(hash, hash_new));
        b.endTransition(2);
        b.addFinalStates(1, 2);
        QEA qea = b.make();
        qea.record_event_name("add", ADD);
        qea.record_event_name("observe", OBSERVE);
        qea.record_event_name("remove", REMOVE);
        return qea;
    }
}
|
package com.yammer.metrics.reporting;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.MetricsRegistry;
import com.yammer.metrics.core.*;
import com.yammer.metrics.util.MetricPredicate;
import com.yammer.metrics.util.Utils;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.Thread.State;
import java.net.Socket;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.yammer.metrics.core.VirtualMachineMetrics.*;
/**
 * A reporter which periodically opens a socket to a Graphite
 * (carbon-cache) server and writes all registered metrics plus JVM
 * metrics in the Graphite plaintext protocol
 * ({@code <path> <value> <epoch-seconds>\n}).
 */
public class GraphiteReporter implements Runnable {
    private static final Logger log = LoggerFactory.getLogger(GraphiteReporter.class);
    private final ScheduledExecutorService tickThread;
    private final MetricsRegistry metricsRegistry;
    private final String host;
    private final int port;
    private final String prefix;
    private final MetricPredicate predicate;
    // Set per run() invocation; only valid while a socket is open.
    private Writer writer;

    /**
     * Enables the graphite reporter to send data for the default metrics registry
     * to graphite server with the specified period.
     *
     * @param period the period between successive outputs
     * @param unit the time unit of {@code period}
     * @param host the host name of graphite server (carbon-cache agent)
     * @param port the port number on which the graphite server is listening
     */
    public static void enable(long period, TimeUnit unit, String host, int port) {
        enable(Metrics.defaultRegistry(), period, unit, host, port);
    }

    /**
     * Enables the graphite reporter to send data for the given metrics registry
     * to graphite server with the specified period.
     *
     * @param metricsRegistry the metrics registry
     * @param period the period between successive outputs
     * @param unit the time unit of {@code period}
     * @param host the host name of graphite server (carbon-cache agent)
     * @param port the port number on which the graphite server is listening
     */
    public static void enable(MetricsRegistry metricsRegistry, long period, TimeUnit unit, String host, int port) {
        enable(metricsRegistry, period, unit, host, port, null);
    }

    /**
     * Enables the graphite reporter to send data to graphite server with the
     * specified period.
     *
     * @param period the period between successive outputs
     * @param unit the time unit of {@code period}
     * @param host the host name of graphite server (carbon-cache agent)
     * @param port the port number on which the graphite server is listening
     * @param prefix the string which is prepended to all metric names
     */
    public static void enable(long period, TimeUnit unit, String host, int port, String prefix) {
        enable(Metrics.defaultRegistry(), period, unit, host, port, prefix);
    }

    /**
     * Enables the graphite reporter to send data to graphite server with the
     * specified period.
     *
     * @param metricsRegistry the metrics registry
     * @param period the period between successive outputs
     * @param unit the time unit of {@code period}
     * @param host the host name of graphite server (carbon-cache agent)
     * @param port the port number on which the graphite server is listening
     * @param prefix the string which is prepended to all metric names
     */
    public static void enable(MetricsRegistry metricsRegistry, long period, TimeUnit unit, String host, int port, String prefix) {
        enable(metricsRegistry, period, unit, host, port, prefix, MetricPredicate.ALL);
    }

    /**
     * Enables the graphite reporter to send data to graphite server with the
     * specified period.
     *
     * @param metricsRegistry the metrics registry
     * @param period the period between successive outputs
     * @param unit the time unit of {@code period}
     * @param host the host name of graphite server (carbon-cache agent)
     * @param port the port number on which the graphite server is listening
     * @param prefix the string which is prepended to all metric names
     * @param predicate filters metrics to be reported
     */
    public static void enable(MetricsRegistry metricsRegistry, long period, TimeUnit unit, String host, int port, String prefix, MetricPredicate predicate) {
        try {
            final GraphiteReporter reporter = new GraphiteReporter(metricsRegistry, host, port, prefix, predicate);
            reporter.start(period, unit);
        } catch (Exception e) {
            log.error("Error creating/starting Graphite reporter:", e);
        }
    }

    /**
     * Creates a new {@link GraphiteReporter}.
     *
     * @param host is graphite server
     * @param port is port on which graphite server is running
     * @param prefix is prepended to all names reported to graphite
     * @throws IOException if there is an error connecting to the Graphite server
     */
    public GraphiteReporter(String host, int port, String prefix) throws IOException {
        this(Metrics.defaultRegistry(), host, port, prefix);
    }

    /**
     * Creates a new {@link GraphiteReporter}.
     *
     * @param metricsRegistry the metrics registry
     * @param host is graphite server
     * @param port is port on which graphite server is running
     * @param prefix is prepended to all names reported to graphite
     * @throws IOException if there is an error connecting to the Graphite server
     */
    public GraphiteReporter(MetricsRegistry metricsRegistry, String host, int port, String prefix) throws IOException {
        this(metricsRegistry, host, port, prefix, MetricPredicate.ALL);
    }

    /**
     * Creates a new {@link GraphiteReporter}.
     *
     * @param metricsRegistry the metrics registry
     * @param host is graphite server
     * @param port is port on which graphite server is running
     * @param prefix is prepended to all names reported to graphite
     * @param predicate filters metrics to be reported
     * @throws IOException if there is an error connecting to the Graphite server
     */
    public GraphiteReporter(MetricsRegistry metricsRegistry, String host, int port, String prefix, MetricPredicate predicate) throws IOException {
        this.tickThread = metricsRegistry.threadPools().newScheduledThreadPool(1, "graphite-reporter");
        this.metricsRegistry = metricsRegistry;
        this.host = host;
        this.port = port;
        if (prefix != null) {
            // Append the "." here so that we don't need to make anything conditional later.
            this.prefix = prefix + ".";
        } else {
            this.prefix = "";
        }
        this.predicate = predicate;
    }

    /**
     * Starts sending output to graphite server.
     *
     * @param period the period between successive displays
     * @param unit the time unit of {@code period}
     */
    public void start(long period, TimeUnit unit) {
        tickThread.scheduleAtFixedRate(this, period, period, unit);
    }

    @Override
    public void run() {
        // One report cycle: open a fresh socket, stream all metrics,
        // flush, and always close the socket.
        Socket socket = null;
        try {
            socket = new Socket(host, port);
            writer = new OutputStreamWriter(socket.getOutputStream());
            long epoch = System.currentTimeMillis() / 1000;
            printVmMetrics(epoch);
            printRegularMetrics(epoch);
            writer.flush();
        } catch (Exception e) {
            log.error("Error:", e);
            if (writer != null) {
                try {
                    // Best-effort flush of whatever was buffered before the failure.
                    writer.flush();
                } catch (IOException e1) {
                    log.error("Error while flushing writer:", e1);
                }
            }
        } finally {
            if (socket != null) {
                try {
                    socket.close();
                } catch (IOException e) {
                    log.error("Error while closing socket:", e);
                }
            }
            writer = null;
        }
    }

    // Writes every metric accepted by the predicate, dispatching on its
    // concrete type. Per-metric errors are logged and do not abort the run.
    private void printRegularMetrics(long epoch) {
        for (Entry<String, Map<String, Metric>> entry : Utils.sortAndFilterMetrics(metricsRegistry.allMetrics(), this.predicate).entrySet()) {
            for (Entry<String, Metric> subEntry : entry.getValue().entrySet()) {
                final String simpleName = (entry.getKey() + "." + subEntry.getKey()).replaceAll(" ", "_");
                final Metric metric = subEntry.getValue();
                if (metric != null) {
                    try {
                        if (metric instanceof GaugeMetric<?>) {
                            printGauge((GaugeMetric<?>) metric, simpleName, epoch);
                        } else if (metric instanceof CounterMetric) {
                            printCounter((CounterMetric) metric, simpleName, epoch);
                        } else if (metric instanceof HistogramMetric) {
                            printHistogram((HistogramMetric) metric, simpleName, epoch);
                        } else if (metric instanceof MeterMetric) {
                            printMetered((MeterMetric) metric, simpleName, epoch);
                        } else if (metric instanceof TimerMetric) {
                            printTimer((TimerMetric) metric, simpleName, epoch);
                        }
                    } catch (Exception ignored) {
                        log.error("Error printing regular metrics:", ignored);
                    }
                }
            }
        }
    }

    // Writes a chunk of already-formatted plaintext lines to the open socket.
    private void sendToGraphite(String data) {
        try {
            writer.write(data);
        } catch (IOException e) {
            log.error("Error sending to Graphite:", e);
        }
    }

    // Graphite metric paths must not contain spaces.
    private String sanitizeName(String name) {
        return name.replace(' ', '-');
    }

    private void printGauge(GaugeMetric<?> gauge, String name, long epoch) {
        sendToGraphite(String.format("%s%s.%s %s %d\n", prefix, sanitizeName(name), "value", gauge.value(), epoch));
    }

    private void printCounter(CounterMetric counter, String name, long epoch) {
        sendToGraphite(String.format("%s%s.%s %d %d\n", prefix, sanitizeName(name), "count", counter.count(), epoch));
    }

    private void printMetered(Metered meter, String name, long epoch) {
        final String sanitizedName = sanitizeName(name);
        final StringBuilder lines = new StringBuilder();
        lines.append(String.format("%s%s.%s %d %d\n", prefix, sanitizedName, "count", meter.count(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "meanRate", meter.meanRate(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "1MinuteRate", meter.oneMinuteRate(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "5MinuteRate", meter.fiveMinuteRate(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "15MinuteRate", meter.fifteenMinuteRate(), epoch));
        sendToGraphite(lines.toString());
    }

    private void printHistogram(HistogramMetric histogram, String name, long epoch) {
        final String sanitizedName = sanitizeName(name);
        final double[] percentiles = histogram.percentiles(0.5, 0.75, 0.95, 0.98, 0.99, 0.999);
        final StringBuilder lines = new StringBuilder();
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "min", histogram.min(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "max", histogram.max(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "mean", histogram.mean(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "stddev", histogram.stdDev(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "median", percentiles[0], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "75percentile", percentiles[1], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "95percentile", percentiles[2], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "98percentile", percentiles[3], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "99percentile", percentiles[4], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "999percentile", percentiles[5], epoch));
        sendToGraphite(lines.toString());
    }

    private void printTimer(TimerMetric timer, String name, long epoch) {
        // A timer is also a meter; report its rate fields first.
        printMetered(timer, name, epoch);
        final String sanitizedName = sanitizeName(name);
        final double[] percentiles = timer.percentiles(0.5, 0.75, 0.95, 0.98, 0.99, 0.999);
        final StringBuilder lines = new StringBuilder();
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "min", timer.min(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "max", timer.max(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "mean", timer.mean(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "stddev", timer.stdDev(), epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "median", percentiles[0], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "75percentile", percentiles[1], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "95percentile", percentiles[2], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "98percentile", percentiles[3], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "99percentile", percentiles[4], epoch));
        lines.append(String.format("%s%s.%s %2.2f %d\n", prefix, sanitizedName, "999percentile", percentiles[5], epoch));
        sendToGraphite(lines.toString());
    }

    private void printDoubleField(String name, double value, long epoch) {
        // BUG FIX: previously called the nonexistent sanitizedName(...)
        // (the method is sanitizeName), which did not compile.
        sendToGraphite(String.format("%s%s %2.2f %d\n", prefix, sanitizeName(name), value, epoch));
    }

    private void printLongField(String name, long value, long epoch) {
        // BUG FIX: previously called the nonexistent sanitizedName(...).
        sendToGraphite(String.format("%s%s %d %d\n", prefix, sanitizeName(name), value, epoch));
    }

    // Writes JVM-level metrics (memory, threads, GC) gathered via the
    // statically imported VirtualMachineMetrics helpers.
    private void printVmMetrics(long epoch) throws IOException {
        printDoubleField("jvm.memory.heap_usage", heapUsage(), epoch);
        printDoubleField("jvm.memory.non_heap_usage", nonHeapUsage(), epoch);
        for (Entry<String, Double> pool : memoryPoolUsage().entrySet()) {
            printDoubleField("jvm.memory.memory_pool_usages." + pool.getKey(), pool.getValue(), epoch);
        }
        printDoubleField("jvm.daemon_thread_count", daemonThreadCount(), epoch);
        printDoubleField("jvm.thread_count", threadCount(), epoch);
        printDoubleField("jvm.uptime", uptime(), epoch);
        printDoubleField("jvm.fd_usage", fileDescriptorUsage(), epoch);
        for (Entry<State, Double> entry : threadStatePercentages().entrySet()) {
            printDoubleField("jvm.thread-states." + entry.getKey().toString().toLowerCase(), entry.getValue(), epoch);
        }
        for (Entry<String, GarbageCollector> entry : garbageCollectors().entrySet()) {
            printLongField("jvm.gc." + entry.getKey() + ".time", entry.getValue().getTime(TimeUnit.MILLISECONDS), epoch);
            printLongField("jvm.gc." + entry.getKey() + ".runs", entry.getValue().getRuns(), epoch);
        }
    }
}
|
package de.duenndns.ssl;
import android.app.Activity;
import android.app.Application;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.Service;
import android.app.AlertDialog;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.Uri;
import android.util.Log;
import android.os.Handler;
import java.io.File;
import java.security.cert.*;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.MessageDigest;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.HashMap;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
/**
* A X509 trust manager implementation which asks the user about invalid
* certificates and memorizes their decision.
* <p>
* The certificate validity is checked using the system default X509
* TrustManager, creating a query Dialog if the check fails.
* <p>
* <b>WARNING:</b> This only works if a dedicated thread is used for
* opening sockets!
*/
public class MemorizingTrustManager implements X509TrustManager {
final static String TAG = "MemorizingTrustManager";

// Intent action / extra keys used to hand the user's decision back from
// the MemorizingActivity dialog via broadcast.
final static String DECISION_INTENT = "de.duenndns.ssl.DECISION";
final static String DECISION_INTENT_APP = DECISION_INTENT + ".app";
final static String DECISION_INTENT_ID = DECISION_INTENT + ".decisionId";
final static String DECISION_INTENT_CERT = DECISION_INTENT + ".cert";
final static String DECISION_INTENT_CHOICE = DECISION_INTENT + ".decisionChoice";
private final static int NOTIFICATION_ID = 100509;

// Location of the app-private keystore holding memorized certificates;
// configurable via setKeyStoreFile() before the first instance is created.
static String KEYSTORE_DIR = "KeyStore";
static String KEYSTORE_FILE = "KeyStore.bks";

Context master;
NotificationManager notificationManager;

// Monotonically increasing id for pending decisions; openDecisions maps
// id -> the MTMDecision object a blocked network thread waits on.
// BUG FIX: openDecisions was created as a raw HashMap; parameterized to
// match its declared type and remove the unchecked warning.
private static int decisionId = 0;
private static HashMap<Integer, MTMDecision> openDecisions = new HashMap<Integer, MTMDecision>();

Handler masterHandler;
private File keyStoreFile;
private KeyStore appKeyStore;
private X509TrustManager defaultTrustManager; // system trust store
private X509TrustManager appTrustManager;     // memorized certificates
/** Creates an instance of the MemorizingTrustManager class.
 *
 * @param m Activity or Service to show the Dialog / Notification
 * @throws ClassCastException if m is neither an Activity nor a Service
 */
private MemorizingTrustManager(Context m) {
    master = m;
    masterHandler = new Handler();
    notificationManager = (NotificationManager)master.getSystemService(Context.NOTIFICATION_SERVICE);
    Application app;
    // Only Activity and Service expose getApplication(); the Application
    // context is used so the keystore directory outlives the caller.
    if (m instanceof Service) {
        app = ((Service)m).getApplication();
    } else if (m instanceof Activity) {
        app = ((Activity)m).getApplication();
    } else throw new ClassCastException("MemorizingTrustManager context must be either Activity or Service!");
    File dir = app.getDir(KEYSTORE_DIR, Context.MODE_PRIVATE);
    keyStoreFile = new File(dir + File.separator + KEYSTORE_FILE);
    // Two trust managers: the system default one and one backed by the
    // user-approved certificates stored in the app key store.
    appKeyStore = loadAppKeyStore();
    defaultTrustManager = getTrustManager(null);
    appTrustManager = getTrustManager(appKeyStore);
}
/**
 * Returns a X509TrustManager list containing a new instance of
 * MemorizingTrustManager, for convenient use with SSLContext:
 *
 * <pre>
 * SSLContext sc = SSLContext.getInstance("TLS");
 * sc.init(null, MemorizingTrustManager.getInstanceList(this),
 *     new java.security.SecureRandom());
 * HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
 * </pre>
 * @param c Activity or Service to show the Dialog / Notification
 */
public static X509TrustManager[] getInstanceList(Context c) {
    MemorizingTrustManager instance = new MemorizingTrustManager(c);
    return new X509TrustManager[] { instance };
}
/**
 * Changes the path for the KeyStore file.
 *
 * The actual filename relative to the app's directory will be
 * <code>app_<i>dirname</i>/<i>filename</i></code>.
 *
 * Note: instances resolve these values in their constructor, so call
 * this before the first instance is created.
 *
 * @param dirname directory to store the KeyStore.
 * @param filename file name for the KeyStore.
 */
public static void setKeyStoreFile(String dirname, String filename) {
    KEYSTORE_DIR = dirname;
    KEYSTORE_FILE = filename;
}
// Creates an X509TrustManager backed by the given KeyStore (null selects
// the platform's default trust store, per TrustManagerFactory.init).
// Returns the first X509TrustManager the factory yields, or null on error.
X509TrustManager getTrustManager(KeyStore ks) {
    try {
        TrustManagerFactory tmf = TrustManagerFactory.getInstance("X509");
        tmf.init(ks);
        for (TrustManager t : tmf.getTrustManagers()) {
            if (t instanceof X509TrustManager) {
                return (X509TrustManager)t;
            }
        }
    } catch (Exception e) {
        // Here, we are covering up errors. It might be more useful
        // however to throw them out of the constructor so the
        // embedding app knows something went wrong.
        Log.e(TAG, "getTrustManager(" + ks + ")", e);
    }
    return null;
}
/**
 * Loads the application's private KeyStore from keyStoreFile, falling
 * back to an empty in-memory store when the file does not exist yet.
 *
 * @return the loaded KeyStore, or null if no KeyStore instance could be
 *         created at all
 */
KeyStore loadAppKeyStore() {
    KeyStore ks;
    try {
        ks = KeyStore.getInstance(KeyStore.getDefaultType());
    } catch (KeyStoreException e) {
        Log.e(TAG, "getAppKeyStore()", e);
        return null;
    }
    java.io.FileInputStream fis = null;
    try {
        // Initialize to an empty store first so ks is usable even when
        // reading the file below fails.
        ks.load(null, null);
        fis = new java.io.FileInputStream(keyStoreFile);
        ks.load(fis, "MTM".toCharArray());
    } catch (java.io.FileNotFoundException e) {
        Log.i(TAG, "getAppKeyStore(" + keyStoreFile + ") - file does not exist");
    } catch (Exception e) {
        Log.e(TAG, "getAppKeyStore(" + keyStoreFile + ")", e);
    } finally {
        // BUG FIX: the input stream was never closed (fd leak).
        if (fis != null) {
            try {
                fis.close();
            } catch (java.io.IOException e) {
                Log.e(TAG, "loadAppKeyStore() - closing " + keyStoreFile, e);
            }
        }
    }
    return ks;
}
/**
 * Memorizes a certificate chain: adds every certificate to the app key
 * store, rebuilds appTrustManager from it, and persists the store to
 * keyStoreFile.
 */
void storeCert(X509Certificate[] chain) {
    // add all certs from chain to appKeyStore
    try {
        for (X509Certificate c : chain)
            appKeyStore.setCertificateEntry(c.getSubjectDN().toString(), c);
    } catch (KeyStoreException e) {
        Log.e(TAG, "storeCert(" + chain + ")", e);
        return;
    }
    // reload appTrustManager
    appTrustManager = getTrustManager(appKeyStore);
    // store KeyStore to file
    java.io.FileOutputStream fos = null;
    try {
        fos = new java.io.FileOutputStream(keyStoreFile);
        appKeyStore.store(fos, "MTM".toCharArray());
    } catch (Exception e) {
        Log.e(TAG, "storeCert(" + keyStoreFile + ")", e);
    } finally {
        // BUG FIX: close the stream even when store() throws; previously
        // close() was inside the try and was skipped on failure.
        if (fos != null) {
            try {
                fos.close();
            } catch (java.io.IOException e) {
                Log.e(TAG, "storeCert() - closing " + keyStoreFile, e);
            }
        }
    }
}
// A certificate counts as "known" when it already has an alias in the
// app key store; keystore errors are treated as "not known".
private boolean isCertKnown(X509Certificate cert) {
    boolean known;
    try {
        known = appKeyStore.getCertificateAlias(cert) != null;
    } catch (KeyStoreException e) {
        known = false;
    }
    return known;
}
// Walks the cause chain looking for a CertificateExpiredException.
private boolean isExpiredException(Throwable e) {
    while (true) {
        if (e instanceof CertificateExpiredException)
            return true;
        e = e.getCause();
        if (e == null)
            return false;
    }
}
/**
 * Common certificate check for both client and server role.
 *
 * The chain is first checked against the app's own trust store of
 * memorized certificates; expired-but-memorized and already-stored
 * certificates are accepted. Only if that fails is the system default
 * trust manager consulted, and if it also rejects the chain, the user
 * is asked interactively via interact().
 *
 * @throws CertificateException if the chain is untrusted and the user
 *         does not accept it
 */
public void checkCertTrusted(X509Certificate[] chain, String authType, boolean isServer)
    throws CertificateException
{
    Log.d(TAG, "checkCertTrusted(" + chain + ", " + authType + ", " + isServer + ")");
    try {
        Log.d(TAG, "checkCertTrusted: trying appTrustManager");
        if (isServer)
            appTrustManager.checkServerTrusted(chain, authType);
        else
            appTrustManager.checkClientTrusted(chain, authType);
    } catch (CertificateException ae) {
        // if the cert is stored in our appTrustManager, we ignore expiredness
        ae.printStackTrace();
        if (isExpiredException(ae)) {
            Log.i(TAG, "checkCertTrusted: accepting expired certificate from keystore");
            return;
        }
        if (isCertKnown(chain[0])) {
            Log.i(TAG, "checkCertTrusted: accepting cert already stored in keystore");
            return;
        }
        try {
            Log.d(TAG, "checkCertTrusted: trying defaultTrustManager");
            if (isServer)
                defaultTrustManager.checkServerTrusted(chain, authType);
            else
                defaultTrustManager.checkClientTrusted(chain, authType);
        } catch (CertificateException e) {
            e.printStackTrace();
            // Neither trust store accepts the chain: block and ask the user.
            interact(chain, authType, e);
        }
    }
}
/** X509TrustManager callback; delegates to checkCertTrusted() in client mode. */
public void checkClientTrusted(X509Certificate[] chain, String authType)
    throws CertificateException
{
    checkCertTrusted(chain, authType, false);
}
/** X509TrustManager callback; delegates to checkCertTrusted() in server mode. */
public void checkServerTrusted(X509Certificate[] chain, String authType)
    throws CertificateException
{
    checkCertTrusted(chain, authType, true);
}
// Accepted issuers come from the system default trust manager only; the
// memorized certificates are not advertised here.
public X509Certificate[] getAcceptedIssuers()
{
    Log.d(TAG, "getAcceptedIssuers()");
    return defaultTrustManager.getAcceptedIssuers();
}
// Registers a pending decision under a fresh id and returns that id.
private int createDecisionId(MTMDecision d) {
    synchronized (openDecisions) {
        int id = decisionId++;
        openDecisions.put(id, d);
        return id;
    }
}
// Formats bytes as lowercase colon-separated hex, e.g. "de:ad:be:ef".
// (Switched from StringBuffer to StringBuilder — no synchronization is
// needed for this local-only buffer — and simplified the separator.)
private static String hexString(byte[] data) {
    StringBuilder si = new StringBuilder();
    for (int i = 0; i < data.length; i++) {
        if (i > 0)
            si.append(":");
        si.append(String.format("%02x", data[i]));
    }
    return si.toString();
}
// Returns the hex-encoded digest of the certificate using the named
// algorithm (e.g. "MD5", "SHA-1"); on failure the exception message is
// returned in place of a hash.
private static String certHash(final X509Certificate cert, String digest) {
    try {
        MessageDigest md = MessageDigest.getInstance(digest);
        md.update(cert.getEncoded());
        byte[] hash = md.digest();
        return hexString(hash);
    } catch (java.security.cert.CertificateEncodingException e) {
        return e.getMessage();
    } catch (java.security.NoSuchAlgorithmException e) {
        return e.getMessage();
    }
}
// Builds the user-visible description of a rejected certificate chain:
// the underlying error message followed by subject, MD5/SHA-1
// fingerprints and issuer of every certificate in the chain.
private String certChainMessage(final X509Certificate[] chain, CertificateException cause) {
    Throwable err = cause;
    Log.d(TAG, "certChainMessage for " + err);
    StringBuilder msg = new StringBuilder();
    if (err.getCause() != null) {
        err = err.getCause();
        msg.append(err.getLocalizedMessage());
        //msg.append("\n");
    }
    for (X509Certificate c : chain) {
        msg.append("\n\n");
        msg.append(c.getSubjectDN().toString());
        msg.append("\nMD5: ");
        msg.append(certHash(c, "MD5"));
        msg.append("\nSHA1: ");
        msg.append(certHash(c, "SHA-1"));
        msg.append("\nSigned by: ");
        msg.append(c.getIssuerDN().toString());
    }
    return msg.toString();
}
// Fallback when the decision Activity cannot be started directly: post
// a status-bar notification whose tap launches the given intent.
// NOTE(review): uses the legacy Notification constructor and
// setLatestEventInfo, which are deprecated on newer Android API levels —
// confirm the minimum supported API.
void startActivityNotification(Intent intent, String certName) {
    Notification n = new Notification(android.R.drawable.ic_lock_lock,
            master.getString(R.string.mtm_notification),
            System.currentTimeMillis());
    PendingIntent call = PendingIntent.getActivity(master, 0, intent, 0);
    n.setLatestEventInfo(master.getApplicationContext(),
            master.getString(R.string.mtm_notification),
            certName, call);
    n.flags |= Notification.FLAG_AUTO_CANCEL;
    notificationManager.notify(NOTIFICATION_ID, n);
}
// Blocks the TLS handshake thread until the user has decided whether to
// trust the presented chain. The decision UI runs on the master handler
// thread and reports back via broadcast (see interactResult), which
// notifies the MTMDecision object this thread waits on.
//
// Throws the original CertificateException if the user rejects the chain.
void interact(final X509Certificate[] chain, String authType, CertificateException cause)
    throws CertificateException
{
    /* prepare the MTMDecision blocker object */
    MTMDecision choice = new MTMDecision();
    final int myId = createDecisionId(choice);
    final String certMessage = certChainMessage(chain, cause);
    BroadcastReceiver decisionReceiver = new BroadcastReceiver() {
        public void onReceive(Context ctx, Intent i) { interactResult(i); }
    };
    // Per-package intent filter so multiple MTM-using apps don't receive
    // each other's decisions.
    master.registerReceiver(decisionReceiver, new IntentFilter(DECISION_INTENT + "/" + master.getPackageName()));
    masterHandler.post(new Runnable() {
        public void run() {
            Intent ni = new Intent(master, MemorizingActivity.class);
            ni.setData(Uri.parse(MemorizingTrustManager.class.getName() + "/" + myId));
            ni.putExtra(DECISION_INTENT_APP, master.getPackageName());
            ni.putExtra(DECISION_INTENT_ID, myId);
            ni.putExtra(DECISION_INTENT_CERT, certMessage);
            // we try to directly start the activity and fall back to
            // making a notification
            try {
                master.startActivity(ni);
            } catch (Exception e) {
                Log.e(TAG, "startActivity: " + e);
                startActivityNotification(ni, certMessage);
            }
        }
    });
    Log.d(TAG, "openDecisions: " + openDecisions);
    Log.d(TAG, "waiting on " + myId);
    try {
        synchronized (choice) { choice.wait(); }
    } catch (InterruptedException e) {
        // FIX: restore the interrupt flag instead of swallowing it
        // (previously only printStackTrace), so callers further up the
        // stack can still observe the interruption.
        Thread.currentThread().interrupt();
        Log.e(TAG, "interact: interrupted while waiting for decision", e);
    }
    master.unregisterReceiver(decisionReceiver);
    Log.d(TAG, "finished wait on " + myId + ": " + choice.state);
    switch (choice.state) {
    case MTMDecision.DECISION_ALWAYS:
        storeCert(chain);
        // intentional fall-through: "always" implies accepting "once" too
    case MTMDecision.DECISION_ONCE:
        break;
    default:
        throw (cause);
    }
}
// Broadcast callback carrying the user's decision. Looks up the pending
// MTMDecision by id, records the choice and wakes the thread blocked in
// interact().
public static void interactResult(Intent i) {
    int decisionId = i.getIntExtra(DECISION_INTENT_ID, MTMDecision.DECISION_INVALID);
    int choice = i.getIntExtra(DECISION_INTENT_CHOICE, MTMDecision.DECISION_INVALID);
    Log.d(TAG, "interactResult: " + decisionId + " chose " + choice);
    Log.d(TAG, "openDecisions: " + openDecisions);
    MTMDecision d;
    synchronized (openDecisions) {
        d = openDecisions.get(decisionId);
        openDecisions.remove(decisionId);
    }
    // FIX: a stale or duplicate broadcast (unknown id) previously caused a
    // NullPointerException here; now it is logged and ignored -- there is
    // no thread waiting on such an id anyway.
    if (d == null) {
        Log.e(TAG, "interactResult: aborting due to stale decision reference!");
        return;
    }
    synchronized (d) {
        d.state = choice;
        d.notify();
    }
}
}
|
package com.nirima.jenkins.plugins.docker.builder;
import com.cloudbees.jenkins.plugins.sshcredentials.SSHAuthenticator;
import com.cloudbees.jenkins.plugins.sshcredentials.SSHUserListBoxModel;
import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials;
import com.nirima.jenkins.plugins.docker.DockerCloud;
import com.nirima.jenkins.plugins.docker.DockerTemplate;
import com.trilead.ssh2.Connection;
import hudson.Extension;
import hudson.Launcher;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.model.ItemGroup;
import hudson.plugins.sshslaves.SSHLauncher;
import hudson.security.ACL;
import hudson.security.AccessControlled;
import hudson.security.Permission;
import hudson.slaves.Cloud;
import hudson.slaves.RetentionStrategy;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.Builder;
import hudson.util.ListBoxModel;
import jenkins.model.Jenkins;
import org.kohsuke.stapler.AncestorInPath;
import org.kohsuke.stapler.DataBoundConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Serializable;
/**
* Builder that adds template to all clouds.
*
* @author Jocelyn De La Rosa
*/
/**
 * Build step that adds the configured {@link DockerTemplate} to every
 * {@link DockerCloud} that does not already have a template for the same
 * image. Because this mutates global cloud configuration, the build must
 * run with ADMINISTER permission.
 */
public class DockerBuilderNewTemplate extends Builder implements Serializable {
    private static final Logger LOGGER = LoggerFactory.getLogger(DockerBuilderNewTemplate.class);

    /** Template to add to all Docker clouds. */
    private DockerTemplate dockerTemplate;

    // Manual data-format version marker for serialized instances.
    private int version = 1;

    @DataBoundConstructor
    public DockerBuilderNewTemplate(DockerTemplate dockerTemplate) {
        this.dockerTemplate = dockerTemplate;
    }

    public DockerTemplate getDockerTemplate() {
        return dockerTemplate;
    }

    public void setDockerTemplate(DockerTemplate dockerTemplate) {
        this.dockerTemplate = dockerTemplate;
    }

    /**
     * Adds the template to each Docker cloud lacking one for its image.
     *
     * @return always {@code true}; failures surface as exceptions
     * @throws java.security.AccessControlException if the build lacks ADMINISTER
     */
    @Override
    public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
            throws InterruptedException, IOException {
        final PrintStream llogger = listener.getLogger();
        final String dockerImage = dockerTemplate.getDockerTemplateBase().getImage();
        // Job must run as Admin as we are changing global cloud configuration here.
        build.getACL().checkPermission(Jenkins.ADMINISTER);
        // Hoisted loop-invariant null check: with a null image there is
        // nothing to add, so skip the cloud scan entirely.
        if (dockerImage != null) {
            for (Cloud c : Jenkins.getInstance().clouds) {
                if (c instanceof DockerCloud) {
                    DockerCloud dockerCloud = (DockerCloud) c;
                    if (dockerCloud.getTemplate(dockerImage) == null) {
                        LOGGER.info("Adding new template: '{}', to cloud: '{}'", dockerImage, dockerCloud.name);
                        llogger.println("Adding new template: '" + dockerImage + "', to cloud: '" + dockerCloud.name + "'");
                        dockerCloud.addTemplate(dockerTemplate);
                    }
                }
            }
        }
        return true;
    }

    @Override
    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl) super.getDescriptor();
    }

    @Extension
    public static class DescriptorImpl extends BuildStepDescriptor<Builder> {
        @Override
        public boolean isApplicable(Class<? extends AbstractProject> jobType) {
            // Applicable to every project type.
            return true;
        }

        @Override
        public String getDisplayName() {
            return "Add a new template to all docker clouds";
        }

        /**
         * Populates the SSH credentials dropdown; admins only, otherwise an
         * empty model is returned.
         */
        public ListBoxModel doFillCredentialsIdItems(@AncestorInPath ItemGroup context) {
            AccessControlled ac = (context instanceof AccessControlled ? (AccessControlled) context : Jenkins.getInstance());
            if (!ac.hasPermission(Jenkins.ADMINISTER)) {
                return new ListBoxModel();
            }
            return new SSHUserListBoxModel().withMatching(SSHAuthenticator.matcher(Connection.class),
                    CredentialsProvider.lookupCredentials(StandardUsernameCredentials.class, context,
                            ACL.SYSTEM, SSHLauncher.SSH_SCHEME));
        }
    }
}
|
package de.fau.cs.mad.rpgpack.jackson;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Parcel;
import android.os.Parcelable;
import android.util.Base64;
import android.util.Log;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import de.fau.cs.mad.rpgpack.ThumbnailLoader;
/**
 * Jackson-serializable character-sheet template with Parcelable support.
 * The icon may be either a path to an image file or inline Base64 image
 * data (see {@link #isIconBase64()}). Parceling deliberately skips the
 * {@link CharacterSheet}.
 */
public class Template implements Parcelable {
    /** Bundle/Intent key under which a Template parcel is passed around. */
    @JsonIgnore
    public static final String PARCELABLE_STRING = "JacksonTemplate";

    /* META DATA */
    private String fileAbsPath = "";
    private String fileName = "";
    private String templateName = "";
    private String gameName = "";
    private String author = "";
    private String date = "";
    private String description = "";
    private String tagString = "";
    private String iconPath = "";

    /* Character */
    private CharacterSheet characterSheet = null;

    public Template() {
        Log.d("Template", "default constructor");
        characterSheet = new CharacterSheet();
    }

    public Template(CharacterSheet sheet) {
        characterSheet = sheet;
    }

    /**
     * Returns the file name for this template, lazily deriving it from the
     * sanitized template name plus a timestamp on first access.
     */
    @SuppressLint("SimpleDateFormat")
    @JsonIgnore
    public String getFileName() {
        if (fileName.isEmpty()) {
            final String sanitizedName = JacksonInterface.getSanitizedFileName(templateName);
            //extracted following 3 lines from else part
            //in if case there is an error if we don't do it that way
            SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd--HH-mm-ss");
            Date date = new Date();
            fileName = sanitizedName + " " + format.format(date);
        }
        return fileName;
    }

    @JsonIgnore
    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    /**
     * Takes over all values from the given Template
     * @param otherTemplate TemplateBrowser Template
     */
    public void takeOverValues(de.fau.cs.mad.rpgpack.templatebrowser.Template otherTemplate) {
        templateName = otherTemplate.getTemplateName();
        gameName = otherTemplate.getWorldName();
        author = otherTemplate.getAuthor();
        date = otherTemplate.getDate();
        iconPath = otherTemplate.getIconPath();
        description = otherTemplate.getDescription();
        tagString = otherTemplate.getTagString();
    }

    /**
     * @param withPrettyWriter If true the writer will indent the output.
     * @return Json representation of this template
     * @throws JsonProcessingException
     */
    public String toJSON(boolean withPrettyWriter) throws JsonProcessingException {
        ObjectMapper mapper = new ObjectMapper();
        if (withPrettyWriter) {
            mapper.enable(SerializationFeature.INDENT_OUTPUT);
        }
        return mapper.writer().writeValueAsString(this);
    }

    public String getTagString() {
        return tagString;
    }

    @JsonProperty("tags")
    public void setTagString(String tagString) {
        this.tagString = tagString;
    }

    public String getTemplateName() {
        return templateName;
    }

    @JsonProperty("name")
    public void setTemplateName(String templateName) {
        this.templateName = templateName;
    }

    public String getGameName() {
        return gameName;
    }

    @JsonProperty("world")
    public void setGameName(String gameName) {
        this.gameName = gameName;
    }

    public String getAuthor() {
        return author;
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public String getDate() {
        return date;
    }

    public void setDate(String date) {
        this.date = date;
    }

    public String getDescription() {
        return description;
    }

    @JsonProperty("des")
    public void setDescription(String description) {
        this.description = description;
    }

    public CharacterSheet getCharacterSheet() {
        return characterSheet;
    }

    @JsonProperty("charSheet")
    public void setCharacterSheet(CharacterSheet characterSheet) {
        this.characterSheet = characterSheet;
    }

    public String getIconPath() {
        return iconPath;
    }

    @JsonProperty("icon")
    public void setIconPath(String iconPath) {
        this.iconPath = iconPath;
    }

    public String getFileAbsPath() {
        return fileAbsPath;
    }

    @JsonIgnore
    public void setFileAbsPath(String fileAbsPath) {
        this.fileAbsPath = fileAbsPath;
    }

    /** @return true iff a non-null, non-empty icon path/payload is set. */
    @JsonIgnore
    public boolean hasIcon() {
        // Simplified from the original if-cascade; identical result.
        return iconPath != null && !iconPath.isEmpty();
    }

    /**
     * @return true if the icon field holds inline Base64 data rather than a
     *         path to an existing file. Errors while probing the filesystem
     *         are treated as "Base64", matching the original behavior.
     */
    @JsonIgnore
    public boolean isIconBase64() {
        if (!hasIcon()) {
            return false;
        }
        try {
            // An existing regular file means the string is a file path.
            return !new File(iconPath).isFile();
        } catch (Exception e) {
            return true;
        }
    }

    /**
     * Decodes the icon either from inline Base64 data or from the file at
     * {@link #iconPath}. Returns null when there is no icon or decoding fails.
     */
    @JsonIgnore
    public Bitmap getIcon(final Context context) {
        if (!hasIcon()) {
            return null;
        }
        if (isIconBase64()) {
            try {
                final byte[] decodedBase64 = Base64.decode(iconPath, Base64.DEFAULT);
                return BitmapFactory.decodeByteArray(decodedBase64, 0, decodedBase64.length);
            } catch (Exception e) {
                e.printStackTrace();
                return null;
            }
        } else {
            return ThumbnailLoader.loadThumbnail(iconPath, context);
        }
    }

    // PARCELABLE BELOW
    @Override
    public int describeContents() {
        return 0;
    }

    /*
     * Ignores CharacterSheet!
     * @see android.os.Parcelable#writeToParcel(android.os.Parcel, int)
     */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(templateName);
        dest.writeString(gameName);
        dest.writeString(author);
        dest.writeString(date);
        dest.writeString(description);
        dest.writeString(tagString);
        dest.writeString(iconPath);
        dest.writeString(fileName);
        dest.writeString(fileAbsPath);
    }

    public static final Parcelable.Creator<Template> CREATOR = new Creator<Template>() {
        @Override
        public Template[] newArray(int size) {
            return new Template[size];
        }

        @Override
        public Template createFromParcel(Parcel source) {
            // IMPORTANT read in same order as written (FIFO)
            Template ret = new Template();
            ret.templateName = source.readString();
            ret.gameName = source.readString();
            ret.author = source.readString();
            ret.date = source.readString();
            ret.description = source.readString();
            ret.tagString = source.readString();
            ret.iconPath = source.readString();
            ret.fileName = source.readString();
            ret.fileAbsPath = source.readString();
            return ret;
        }
    };
    // PARCELABLE END
}
|
package de.lmu.ifi.dbs.utilities.output;
import java.text.NumberFormat;
import java.util.Locale;
/**
* Provides several methods for formatting objects for print purposes.
*
* @author Elke Achtert (<a href="mailto:achtert@dbs.ifi.lmu.de">achtert@dbs.ifi.lmu.de</a>)
*/
/**
 * Provides several methods for formatting objects for print purposes.
 *
 * @author Elke Achtert (<a href="mailto:achtert@dbs.ifi.lmu.de">achtert@dbs.ifi.lmu.de</a>)
 */
public class Format {
    /**
     * Number formatter (US locale) with exactly 4 fraction digits.
     * Note: unlike {@link #format(double, int)}, grouping stays enabled.
     */
    public static final NumberFormat NF4 = NumberFormat.getInstance(Locale.US);

    /**
     * Number formatter (US locale) with exactly 8 fraction digits.
     */
    public static final NumberFormat NF8 = NumberFormat.getInstance(Locale.US);

    static {
        NF4.setMinimumFractionDigits(4);
        NF4.setMaximumFractionDigits(4);
        NF8.setMinimumFractionDigits(8);
        NF8.setMaximumFractionDigits(8);
    }

    /**
     * Formats the double d with the specified fraction digits.
     *
     * @param d      the double to be formatted
     * @param digits the number of fraction digits
     * @return a String representing the double d
     */
    public static String format(final double d, int digits) {
        final NumberFormat nf = NumberFormat.getInstance(Locale.US);
        nf.setMaximumFractionDigits(digits);
        nf.setMinimumFractionDigits(digits);
        nf.setGroupingUsed(false);
        return nf.format(d);
    }

    /**
     * Formats the double array d with ', ' as separator.
     *
     * @param d the double array to be formatted
     * @return a String representing the double array d
     */
    public static String format(double[] d) {
        return format(d, ", ");
    }

    /**
     * Formats the double array d with the specified separator and the specified
     * fraction digits.
     *
     * @param d      the double array to be formatted
     * @param sep    the separator between the single values of the double array,
     *               e.g. ','
     * @param digits the number of fraction digits
     * @return a String representing the double array d
     */
    public static String format(double[] d, String sep, int digits) {
        // StringBuilder instead of StringBuffer: local buffer, no locking needed.
        StringBuilder buffer = new StringBuilder();
        for (int i = 0; i < d.length; i++) {
            buffer.append(format(d[i], digits));
            if (i < d.length - 1) {
                buffer.append(sep);
            }
        }
        return buffer.toString();
    }

    /**
     * Formats the double array d with the specified separator, using the
     * default {@code Double.toString} rendering of each value.
     *
     * @param d   the double array to be formatted
     * @param sep the separator between the single values of the double array,
     *            e.g. ','
     * @return a String representing the double array d
     */
    public static String format(double[] d, String sep) {
        StringBuilder buffer = new StringBuilder();
        for (int i = 0; i < d.length; i++) {
            buffer.append(d[i]);
            if (i < d.length - 1) {
                buffer.append(sep);
            }
        }
        return buffer.toString();
    }

    /**
     * Formats the double array d with the specified fraction digits.
     *
     * @param d      the double array to be formatted
     * @param digits the number of fraction digits
     * @return a String representing the double array d
     */
    public static String format(double[] d, int digits) {
        return format(d, ", ", digits);
    }

    /**
     * Returns an integer-string for the given input, that has as many leading
     * zeros as to match the length of the specified maximum.
     *
     * @param input   an integer to be formatted
     * @param maximum the maximum to adapt the format to
     * @return an integer-string for the given input, that has as many leading
     *         zeros as to match the length of the specified maximum
     */
    public static String format(int input, int maximum) {
        NumberFormat formatter = NumberFormat.getIntegerInstance();
        formatter.setMinimumIntegerDigits(Integer.toString(maximum).length());
        return formatter.format(input);
    }
}
|
package edu.kit.ipd.sdq.vitruvius.framework.vsum;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.log4j.Logger;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.transaction.RecordingCommand;
import org.eclipse.emf.transaction.TransactionalEditingDomain;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.datatypes.CorrespondenceInstance;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.datatypes.CorrespondenceInstanceDecorator;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.datatypes.Invariants;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.datatypes.Mapping;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.datatypes.Metamodel;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.datatypes.ModelInstance;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.datatypes.VURI;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.datatypes.ValidationResult;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.interfaces.CorrespondenceProviding;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.interfaces.MappingManaging;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.interfaces.MetamodelManaging;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.interfaces.ModelProviding;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.interfaces.Validating;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.interfaces.ViewTypeManaging;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.internal.InternalContractsBuilder;
import edu.kit.ipd.sdq.vitruvius.framework.contracts.internal.InternalCorrespondenceInstance;
import edu.kit.ipd.sdq.vitruvius.framework.meta.correspondence.datatypes.TUID;
import edu.kit.ipd.sdq.vitruvius.framework.util.VitruviusConstants;
import edu.kit.ipd.sdq.vitruvius.framework.util.bridges.EcoreResourceBridge;
import edu.kit.ipd.sdq.vitruvius.framework.util.datatypes.Pair;
import edu.kit.ipd.sdq.vitruvius.framework.vsum.helper.FileSystemHelper;
public class VSUMImpl implements ModelProviding, CorrespondenceProviding, Validating {
private static final Logger logger = Logger.getLogger(VSUMImpl.class.getSimpleName());
private final MappingManaging mappingManaging;
private final MetamodelManaging metamodelManaging;
private final ViewTypeManaging viewTypeManaging;
protected final Map<VURI, ModelInstance> modelInstances;
private final ResourceSet resourceSet;
private final Map<Mapping, InternalCorrespondenceInstance> mapping2CorrespondenceInstanceMap;
private ClassLoader classLoader;
public VSUMImpl(final MetamodelManaging metamodelManaging, final ViewTypeManaging viewTypeManaging,
final MappingManaging mappingManaging) {
this(metamodelManaging, viewTypeManaging, mappingManaging, null);
}
public VSUMImpl(final MetamodelManaging metamodelManaging, final ViewTypeManaging viewTypeManaging,
final MappingManaging mappingManaging, final ClassLoader classLoader) {
this.metamodelManaging = metamodelManaging;
this.viewTypeManaging = viewTypeManaging;
this.mappingManaging = mappingManaging;
this.resourceSet = new ResourceSetImpl();
this.modelInstances = new HashMap<VURI, ModelInstance>();
this.mapping2CorrespondenceInstanceMap = new HashMap<Mapping, InternalCorrespondenceInstance>();
this.classLoader = classLoader;
loadVURIsOfVSMUModelInstances();
loadAndMapCorrepondenceInstances();
}
@Override
public ModelInstance getModelInstanceCopy(final VURI uri) {
// TODO Auto-generated method stub
return null;
}
/**
* Supports three cases: 1) get registered 2) create non-existing 3) get unregistered but
* existing that contains at most a root element without children. But throws an exception if an
* instance that contains more than one element exists at the uri.
*
* DECISION Since we do not throw an exception (which can happen in 3) we always return a valid
* model. Hence the caller do not have to check whether the retrieved model is null.
*/
@Override
public ModelInstance getAndLoadModelInstanceOriginal(final VURI modelURI) {
final ModelInstance modelInstance = getModelInstanceOriginal(modelURI);
try {
final TransactionalEditingDomain transactionalEditingDomain = getTransactionalEditingDomain();
RecordingCommand recordingCommand = new RecordingCommand(transactionalEditingDomain) {
@Override
protected void doExecute() {
modelInstance.load(getMetamodelByURI(modelURI).getDefaultLoadOptions());
}
};
transactionalEditingDomain.getCommandStack().execute(recordingCommand);
} catch (RuntimeException re) {
// could not load model instance --> this should only be the case when the model is not
// Existing yet
logger.info("Exception during loading of model instance " + modelInstance + " occured: " + re, re);
}
return modelInstance;
}
public ModelInstance getModelInstanceOriginal(final VURI modelURI) {
ModelInstance modelInstance = this.modelInstances.get(modelURI);
if (modelInstance == null) {
final TransactionalEditingDomain transactionalEditingDomain = getTransactionalEditingDomain();
RecordingCommand recordingCommand = new RecordingCommand(transactionalEditingDomain) {
@Override
protected void doExecute() {
// case 2 or 3
ModelInstance internalModelInstance = getOrCreateUnregisteredModelInstance(modelURI);
VSUMImpl.this.modelInstances.put(modelURI, internalModelInstance);
saveVURIsOfVSUMModelInstances();
}
};
transactionalEditingDomain.getCommandStack().execute(recordingCommand);
modelInstance = this.modelInstances.get(modelURI);
}
return modelInstance;
}
public boolean existsModelInstance(final VURI modelURI) {
return this.modelInstances.containsKey(modelURI);
}
/**
* Saves the resource for the given vuri. If the VURI is not existing yet it will be created.
*
* @param vuri
* The VURI to save
*/
@Override
public void saveExistingModelInstanceOriginal(final VURI vuri) {
saveExistingModelInstanceOriginal(vuri, null);
}
private void saveExistingModelInstanceOriginal(final VURI vuri,
final Pair<EObject, TUID> tuidToUpdateWithRootEObjectPair) {
final TransactionalEditingDomain transactionalEditingDomain = getTransactionalEditingDomain();
transactionalEditingDomain.getCommandStack().execute(new RecordingCommand(transactionalEditingDomain) {
@Override
protected void doExecute() {
ModelInstance modelInstanceToSave = getModelInstanceOriginal(vuri);
Metamodel metamodel = getMetamodelByURI(vuri);
Resource resourceToSave = modelInstanceToSave.getResource();
try {
EcoreResourceBridge.saveResource(resourceToSave, metamodel.getDefaultSaveOptions());
} catch (IOException e) {
throw new RuntimeException("Could not save VURI + " + vuri + ": " + e);
}
saveAllChangedCorrespondences(modelInstanceToSave, tuidToUpdateWithRootEObjectPair);
for (EObject root : modelInstanceToSave.getRootElements()) {
metamodel.removeRootFromTUIDCache(root);
}
}
});
}
@Override
public void saveModelInstanceOriginalWithEObjectAsOnlyContent(final VURI vuri, final EObject rootEObject,
final TUID oldTUID) {
final ModelInstance modelInstance = getAndLoadModelInstanceOriginal(vuri);
getTransactionalEditingDomain().getCommandStack()
.execute(new RecordingCommand(getTransactionalEditingDomain()) {
@Override
protected void doExecute() {
final Resource resource = modelInstance.getResource();
// clear the resource first
resource.getContents().clear();
resource.getContents().add(rootEObject);
VSUMImpl.this.saveExistingModelInstanceOriginal(vuri,
new Pair<EObject, TUID>(rootEObject, oldTUID));
}
});
}
private void saveAllChangedCorrespondences(final ModelInstance modelInstanceToSave,
final Pair<EObject, TUID> tuidToUpdateNewRootEObjectPair) {
VURI metamodeURI = modelInstanceToSave.getMetamodeURI();
Metamodel metamodel = this.metamodelManaging.getMetamodel(metamodeURI);
Set<InternalCorrespondenceInstance> allCorrespondenceInstances = getOrCreateAllCorrespondenceInstancesForMM(
metamodel);
for (InternalCorrespondenceInstance correspondenceInstance : allCorrespondenceInstances) {
if (null != tuidToUpdateNewRootEObjectPair && tuidToUpdateNewRootEObjectPair.getSecond() != null) {
correspondenceInstance.update(tuidToUpdateNewRootEObjectPair.getSecond(),
tuidToUpdateNewRootEObjectPair.getFirst());
}
if (correspondenceInstance.changedAfterLastSave()) {
if (correspondenceInstance instanceof CorrespondenceInstanceDecorator) {
saveCorrespondenceInstanceDecorators((CorrespondenceInstanceDecorator) correspondenceInstance);
}
correspondenceInstance.resetChangedAfterLastSave();
}
}
}
private void saveCorrespondenceInstanceDecorators(final CorrespondenceInstanceDecorator correspondenceInstance) {
Map<String, Object> fileExtPrefix2ObjectMap = correspondenceInstance.getFileExtPrefix2ObjectMapForSave();
for (Entry<String, Object> fileExtPrefixAndObject : fileExtPrefix2ObjectMap.entrySet()) {
String fileExtPrefix = fileExtPrefixAndObject.getKey();
String fileName = getFileNameForCorrespondenceInstanceDecorator(correspondenceInstance, fileExtPrefix);
Object object = fileExtPrefixAndObject.getValue();
FileSystemHelper.saveObjectToFile(object, fileName);
}
}
private String getFileNameForCorrespondenceInstanceDecorator(
final InternalCorrespondenceInstance correspondenceInstance, final String fileExtPrefix) {
VURI vuri = correspondenceInstance.getURI();
VURI newVURI = vuri.replaceFileExtension(fileExtPrefix + VitruviusConstants.getCorrespondencesFileExt());
return newVURI.toResolvedAbsolutePath();
}
private ModelInstance getOrCreateUnregisteredModelInstance(final VURI modelURI) {
String fileExtension = modelURI.getFileExtension();
Metamodel metamodel = this.metamodelManaging.getMetamodel(fileExtension);
if (metamodel == null) {
throw new RuntimeException("Cannot create a new model instance at the uri '" + modelURI
+ "' because no metamodel is registered for the file extension '" + fileExtension + "'!");
}
return getOrCreateUnregisteredModelInstance(modelURI, metamodel);
}
private ModelInstance getOrCreateUnregisteredModelInstance(final VURI modelURI, final Metamodel metamodel) {
ModelInstance modelInstance = loadModelInstance(modelURI, metamodel);
getOrCreateAllCorrespondenceInstancesForMM(metamodel);
return modelInstance;
}
private ModelInstance loadModelInstance(final VURI modelURI, final Metamodel metamodel) {
URI emfURI = modelURI.getEMFUri();
Resource modelResource = EcoreResourceBridge.loadResourceAtURI(emfURI, this.resourceSet,
metamodel.getDefaultLoadOptions());
ModelInstance modelInstance = new ModelInstance(modelURI, modelResource);
return modelInstance;
}
private Collection<Mapping> getAllMappings(final Metamodel metamodel) {
return this.mappingManaging.getAllMappings(metamodel);
}
public Set<InternalCorrespondenceInstance> getOrCreateAllCorrespondenceInstancesForMM(final Metamodel metamodel) {
Collection<Mapping> mappings = getAllMappings(metamodel);
Set<InternalCorrespondenceInstance> correspondenceInstances = new HashSet<InternalCorrespondenceInstance>(
null == mappings ? 0 : mappings.size());
if (null == mappings) {
logger.warn("mappings == null. No correspondence instace for MM: " + metamodel + " created."
+ "Empty correspondence list will be returned");
return correspondenceInstances;
}
for (Mapping mapping : mappings) {
InternalCorrespondenceInstance correspondenceInstance = this.mapping2CorrespondenceInstanceMap.get(mapping);
if (correspondenceInstance == null) {
correspondenceInstance = createAndRegisterCorrespondenceInstance(mapping);
}
correspondenceInstances.add(correspondenceInstance);
}
return correspondenceInstances;
}
private InternalCorrespondenceInstance createAndRegisterCorrespondenceInstance(final Mapping mapping) {
InternalCorrespondenceInstance correspondenceInstance;
VURI[] mmURIs = mapping.getMetamodelURIs();
VURI correspondencesVURI = FileSystemHelper.getCorrespondencesVURI(mmURIs);
correspondenceInstance = createCorrespondenceInstance(mapping, correspondencesVURI);
if (correspondenceInstance instanceof CorrespondenceInstanceDecorator) {
loadAndInitializeCorrespondenceInstanceDecorators((CorrespondenceInstanceDecorator) correspondenceInstance);
}
this.mapping2CorrespondenceInstanceMap.put(mapping, correspondenceInstance);
return correspondenceInstance;
}
private InternalCorrespondenceInstance createCorrespondenceInstance(final Mapping mapping,
final VURI correspondencesVURI) {
Resource correspondencesResource = this.resourceSet.createResource(correspondencesVURI.getEMFUri());
return InternalContractsBuilder.createCorrespondenceInstance(mapping, this, correspondencesVURI,
correspondencesResource);
}
private void loadAndInitializeCorrespondenceInstanceDecorators(
final CorrespondenceInstanceDecorator correspondenceInstance) {
Set<String> fileExtPrefixesForObjects = correspondenceInstance.getFileExtPrefixesForObjectsToLoad();
Map<String, Object> fileExtPrefix2ObjectMap = new HashMap<String, Object>();
for (String fileExtPrefix : fileExtPrefixesForObjects) {
String fileName = getFileNameForCorrespondenceInstanceDecorator(correspondenceInstance, fileExtPrefix);
Object loadedObject = FileSystemHelper.loadObjectFromFile(fileName, this.classLoader);
fileExtPrefix2ObjectMap.put(fileExtPrefix, loadedObject);
}
correspondenceInstance.initialize(fileExtPrefix2ObjectMap);
}
// @Override
public ModelInstance getModelInstanceOriginalForImport(final VURI uri) {
// TODO Auto-generated method stub
return null;
}
/**
* Returns the correspondenceInstance for the mapping from the metamodel at the first VURI to
* the metamodel at the second VURI or the other way round
*
* @return the found correspondenceInstance or null if there is none
*/
@Override
public InternalCorrespondenceInstance getCorrespondenceInstanceOriginal(final VURI mmAVURI, final VURI mmBVURI) {
Mapping mapping = this.mappingManaging.getMapping(mmAVURI, mmBVURI);
if (this.mapping2CorrespondenceInstanceMap.containsKey(mapping)) {
return this.mapping2CorrespondenceInstanceMap.get(mapping);
}
logger.warn("no mapping found for the metamodel at: " + mmAVURI + " and the metamodel at: " + mmBVURI);
return null;
}
/**
* Returns all correspondences instances for a given VURI. null will be returned. We are not
* creating new CorrespondenceInstance here, cause we can not guess the linked model. The method
* {@link getCorrespondenceInstanceOriginal} must be called before to create the appropriate
* correspondence instance
*
* @see edu.kit.ipd.sdq.vitruvius.framework.meta.correspondence.datatypes.CorrespondenceInstance
* @return set that contains all CorrespondenceInstances for the VURI or null if there is non
*/
@Override
public Set<InternalCorrespondenceInstance> getOrCreateAllCorrespondenceInstances(final VURI model1uri) {
Metamodel metamodelForUri = this.metamodelManaging.getMetamodel(model1uri.getFileExtension());
return getOrCreateAllCorrespondenceInstancesForMM(metamodelForUri);
}
@Override
public CorrespondenceInstance getCorrespondenceInstanceCopy(final VURI model1uri, final VURI model2uri) {
// TODO Auto-generated method stub
return null;
}
@Override
public ValidationResult validate(final Invariants invariants) {
// TODO Auto-generated method stub
return null;
}
@Override
public ValidationResult validate(final ModelInstance modelInstance, final Invariants invariants) {
// TODO Auto-generated method stub
return null;
}
@Override
public ValidationResult validate(final ModelInstance modelInstanceA, final ModelInstance modelInstanceB,
final Invariants invariants) {
// TODO Auto-generated method stub
return null;
}
private void loadVURIsOfVSMUModelInstances() {
Set<VURI> vuris = FileSystemHelper.loadVSUMvURIsFromFile();
for (VURI vuri : vuris) {
Metamodel metamodel = getMetamodelByURI(vuri);
ModelInstance modelInstance = loadModelInstance(vuri, metamodel);
this.modelInstances.put(vuri, modelInstance);
}
}
private void saveVURIsOfVSUMModelInstances() {
FileSystemHelper.saveVSUMvURIsToFile(this.modelInstances.keySet());
}
private Metamodel getMetamodelByURI(final VURI uri) {
String fileExtension = uri.getFileExtension();
return this.metamodelManaging.getMetamodel(fileExtension);
}
private void loadAndMapCorrepondenceInstances() {
Metamodel[] metamodels = this.metamodelManaging.getAllMetamodels();
for (Metamodel metamodel : metamodels) {
getOrCreateAllCorrespondenceInstancesForMM(metamodel);
}
}
@Override
public synchronized TransactionalEditingDomain getTransactionalEditingDomain() {
if (null == TransactionalEditingDomain.Factory.INSTANCE.getEditingDomain(this.resourceSet)) {
attachTransactionalEditingDomain();
}
return TransactionalEditingDomain.Factory.INSTANCE.getEditingDomain(this.resourceSet);
}
private void attachTransactionalEditingDomain() {
TransactionalEditingDomain.Factory.INSTANCE.createEditingDomain(this.resourceSet);
}
@Override
public void detachTransactionalEditingDomain() {
TransactionalEditingDomain domain = TransactionalEditingDomain.Factory.INSTANCE
.getEditingDomain(this.resourceSet);
if (domain != null) {
domain.dispose();
}
}
/**
 * Deletes the model instance identified by the given VURI: removes its backing
 * resource inside a recording command and unregisters it from the instance map.
 *
 * @param vuri the URI of the model instance to delete
 */
@Override
public void deleteModelInstanceOriginal(final VURI vuri) {
    final ModelInstance modelInstance = getModelInstanceOriginal(vuri);
    final Resource resource = modelInstance.getResource();
    // Hoisted: the original called getTransactionalEditingDomain() twice.
    final TransactionalEditingDomain editingDomain = getTransactionalEditingDomain();
    editingDomain.getCommandStack().execute(new RecordingCommand(editingDomain) {
        @Override
        protected void doExecute() {
            try {
                resource.delete(null);
                // BUG FIX: modelInstances is keyed by VURI (see where entries are
                // put), so removing by the ModelInstance value was a no-op that
                // left a stale entry behind. Remove by key instead.
                VSUMImpl.this.modelInstances.remove(vuri);
            } catch (final IOException e) {
                logger.info("Deletion of resource " + resource + " did not work. Reason: " + e);
            }
        }
    });
}
}
|
package edu.oregonstate.cope.eclipse.installer;
import static org.junit.Assert.*;
import java.io.File;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.BeforeClass;
import org.junit.Test;
import edu.oregonstate.cope.clientRecorder.Properties;
import edu.oregonstate.cope.eclipse.COPEPlugin;
import edu.oregonstate.cope.eclipse.PopulatedWorkspaceTest;
import edu.oregonstate.cope.eclipse.SnapshotManager;
/**
 * Verifies that COPE plugin storage is versioned correctly and that an update
 * produces a snapshot zip in the versioned local storage.
 */
public class PluginUpdateTest extends PopulatedWorkspaceTest {

    private static COPEPlugin plugin;

    // Files that are legitimately allowed to live outside a versioned directory.
    private static List<String> allowedUnversionedFiles;

    @BeforeClass
    public static void beforeClass() throws Exception {
        PopulatedWorkspaceTest.beforeClass();
        plugin = COPEPlugin.getDefault();
        plugin.getSnapshotManager().knowProject(PopulatedWorkspaceTest.javaProject.getProject().getName());
        allowedUnversionedFiles = new ArrayList<>();
        allowedUnversionedFiles.add("workspace_id");
        allowedUnversionedFiles.add(COPEPlugin.getDefault()._getInstallationConfigFileName());
        allowedUnversionedFiles.add(Installer.SURVEY_FILENAME);
        allowedUnversionedFiles.add(Installer.EMAIL_FILENAME);
    }

    /** Both storage roots must end in the current plugin version segment. */
    @Test
    public void testVersionedLocalStorage() throws Exception {
        assertPathHasCurrentVersion(plugin.getVersionedLocalStorage().toPath());
        assertPathHasCurrentVersion(plugin.getVersionedBundleStorage().toPath());
    }

    private void assertPathHasCurrentVersion(Path versionedPath) {
        assertTrue(versionedPath.endsWith(plugin.getPluginVersion().toString()));
    }

    /** Every entry in local storage is either a versioned directory or a whitelisted file. */
    @Test
    public void testEverythingIsInVersionedFiles() throws Exception {
        for (File file : plugin.getLocalStorage().listFiles()) {
            if (file.isDirectory())
                checkDirectory(file);
            if (file.isFile())
                checkFile(file);
        }
    }

    private void checkFile(File file) {
        assertTrue(allowedUnversionedFiles.contains(file.getName()));
    }

    private void checkDirectory(File file) {
        assertTrue(file.getName().matches("\\d+\\.\\d+\\.\\d+\\.qualifier"));
        List<String> versionedFileChildren = Arrays.asList(file.list());
        assertTrue(versionedFileChildren.contains("eventFiles"));
        assertTrue(versionedFileChildren.contains("known-projects"));
    }

    @SuppressWarnings("static-access")
    @Test
    public void testSnapshotAtUpdate() throws Exception {
        // Retained for its side effect of touching workspace properties before the
        // update runs — TODO(review): confirm the call is actually required; the
        // returned value was never used in the original test.
        plugin.getWorkspaceProperties();
        new Installer().doUpdate("v1", "v2");
        boolean zipExists = false;
        for (File file : plugin.getVersionedLocalStorage().listFiles()) {
            if (file.toPath().toString().endsWith(".zip")) {
                zipExists = true;
                break; // one snapshot zip is sufficient
            }
        }
        assertTrue(zipExists);
    }
}
|
package org.mule.examples;
import static org.junit.Assert.assertTrue;
import java.io.FileInputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mule.api.MuleMessage;
import org.mule.module.client.MuleClient;
import org.mule.tck.junit4.FunctionalTestCase;
/**
 * Integration test: hits the example HTTP endpoint and checks the NetSuite
 * customer list page is rendered.
 */
public class GetCustomersListFromNetsuiteIT extends FunctionalTestCase
{
    private static final String PATH_TO_TEST_PROPERTIES = "./src/test/resources/mule.test.properties";
    private static final Logger LOGGER = LogManager.getLogger(GetCustomersListFromNetsuiteIT.class);
    private static final String LAST_NAME_VALUE = "a";
    private static final CharSequence TEST_HTML = "<h1>Netsuite Customer List</h1>";

    /** Mule configuration for the flow under test. */
    @Override
    protected String getConfigResources()
    {
        return "get-customer-list-from-netsuite.xml";
    }

    /**
     * Loads NetSuite credentials from the test properties file and publishes
     * them as system properties consumed by the Mule configuration.
     */
    @BeforeClass
    public static void init() {
        final Properties props = new Properties();
        // FIX: the input stream was never closed (resource leak); use
        // try-with-resources so it is closed on every path.
        try (FileInputStream in = new FileInputStream(PATH_TO_TEST_PROPERTIES)) {
            props.load(in);
        } catch (Exception e) {
            LOGGER.error("Error occurred while reading mule.test.properties", e);
        }
        System.setProperty("netsuite.email", props.getProperty("netsuite.email"));
        System.setProperty("netsuite.password", props.getProperty("netsuite.password"));
        System.setProperty("netsuite.account", props.getProperty("netsuite.account"));
        System.setProperty("netsuite.roleId", props.getProperty("netsuite.roleId"));
    }

    /** Sends a GET to the customers endpoint and asserts the HTML header appears. */
    @Test
    public void testRetrieve() throws Exception
    {
        MuleClient client = new MuleClient(muleContext);
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("http.method", "GET");
        MuleMessage response = client.send("http://0.0.0.0:8081/customers?lastName=" + LAST_NAME_VALUE, null, props);
        assertTrue(response.getPayloadAsString().contains(TEST_HTML));
    }
}
|
package org.ccci.gto.android.common.db.support.v4.content;
import android.content.Context;
import android.database.Cursor;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
import org.ccci.gto.android.common.db.AbstractDao;
import org.ccci.gto.android.common.db.Expression;
import org.ccci.gto.android.common.db.Join;
import org.ccci.gto.android.common.db.Query;
import org.ccci.gto.android.common.db.Table;
import org.ccci.gto.android.common.support.v4.content.SimpleCursorLoader;
import org.ccci.gto.android.common.util.BundleUtils;
import static org.ccci.gto.android.common.db.AbstractDao.ARG_DISTINCT;
import static org.ccci.gto.android.common.db.AbstractDao.ARG_JOINS;
import static org.ccci.gto.android.common.db.AbstractDao.ARG_ORDER_BY;
import static org.ccci.gto.android.common.db.AbstractDao.ARG_PROJECTION;
import static org.ccci.gto.android.common.db.AbstractDao.ARG_WHERE;
/**
 * A {@link SimpleCursorLoader} that builds its cursor from an {@link AbstractDao}
 * query (table, optional joins, projection, where-clause, and sort order), all of
 * which may be supplied via a {@link Bundle} of arguments.
 */
public class DaoCursorLoader<T> extends SimpleCursorLoader {
    @NonNull
    protected final AbstractDao mDao;

    private boolean mDistinct = false;
    @NonNull
    private final Table<T> mFrom;
    @NonNull
    @SuppressWarnings("unchecked")
    private Join<T, ?>[] mJoins = Join.NO_JOINS;
    @Nullable
    private Expression mWhere;

    public DaoCursorLoader(@NonNull final Context context, @NonNull final AbstractDao dao, @NonNull final Class<T> type,
                           @Nullable final Bundle args) {
        this(context, dao, Table.forClass(type), args);
    }

    @SuppressWarnings("unchecked")
    public DaoCursorLoader(@NonNull final Context context, @NonNull final AbstractDao dao, @NonNull final Table<T> from,
                           @Nullable final Bundle args) {
        super(context);
        mDao = dao;
        mFrom = from;
        if (args == null) {
            // No arguments supplied: explicitly install the default for every option
            // (setProjection(null) resolves to the DAO's full projection).
            setDistinct(false);
            setJoins((Join<T, ?>[]) null);
            setProjection(null);
            setWhere(null);
            setSortOrder(null);
        } else {
            setDistinct(args.getBoolean(ARG_DISTINCT, false));
            setJoins(BundleUtils.getParcelableArray(args, ARG_JOINS, Join.class));
            setProjection(args.getStringArray(ARG_PROJECTION));
            setWhere((Expression) args.getParcelable(ARG_WHERE));
            setSortOrder(args.getString(ARG_ORDER_BY));
        }
    }

    /** Assembles the query from the current configuration and runs it on the DAO. */
    @Nullable
    @Override
    @WorkerThread
    protected final Cursor getCursor() {
        return mDao.getCursor(
                Query.select(mFrom)
                        .distinct(isDistinct())
                        .joins(getJoins())
                        .projection(getProjection())
                        .where(getWhere())
                        .orderBy(getSortOrder()));
    }

    public void setDistinct(final boolean distinct) {
        mDistinct = distinct;
    }

    public boolean isDistinct() {
        return mDistinct;
    }

    /** A {@code null} projection falls back to the DAO's full projection for the table. */
    @Override
    public void setProjection(@Nullable final String[] projection) {
        if (projection != null) {
            super.setProjection(projection);
        } else {
            super.setProjection(mDao.getFullProjection(mFrom));
        }
    }

    @NonNull
    public String[] getProjection() {
        final String[] projection = super.getProjection();
        if (projection != null) {
            return projection;
        }
        return mDao.getFullProjection(mFrom);
    }

    @SuppressWarnings("unchecked")
    public void setJoins(@Nullable final Join<T, ?>... joins) {
        // A null argument clears any joins back to the empty sentinel.
        mJoins = (joins == null) ? Join.NO_JOINS : joins;
    }

    @NonNull
    public Join<T, ?>[] getJoins() {
        return mJoins;
    }

    public void setWhere(@Nullable final Expression where) {
        mWhere = where;
    }

    @Nullable
    public Expression getWhere() {
        return mWhere;
    }
}
|
package org.innovateuk.ifs.grant.service;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.innovateuk.ifs.application.domain.FormInputResponse;
import org.innovateuk.ifs.application.repository.FormInputResponseRepository;
import org.innovateuk.ifs.competition.domain.CompetitionParticipantRole;
import org.innovateuk.ifs.competition.domain.InnovationLead;
import org.innovateuk.ifs.competition.repository.InnovationLeadRepository;
import org.innovateuk.ifs.finance.handler.ProjectFinanceHandler;
import org.innovateuk.ifs.finance.resource.ProjectFinanceResource;
import org.innovateuk.ifs.finance.resource.category.FinanceRowCostCategory;
import org.innovateuk.ifs.finance.resource.cost.FinanceRowType;
import org.innovateuk.ifs.form.domain.FormInput;
import org.innovateuk.ifs.organisation.resource.OrganisationTypeEnum;
import org.innovateuk.ifs.project.core.builder.ProjectBuilder;
import org.innovateuk.ifs.project.core.domain.PartnerOrganisation;
import org.innovateuk.ifs.project.core.domain.Project;
import org.innovateuk.ifs.project.core.domain.ProjectUser;
import org.innovateuk.ifs.project.financechecks.domain.Cost;
import org.innovateuk.ifs.project.financechecks.domain.CostCategory;
import org.innovateuk.ifs.project.monitoring.domain.MonitoringOfficer;
import org.innovateuk.ifs.project.spendprofile.domain.SpendProfile;
import org.innovateuk.ifs.project.spendprofile.repository.SpendProfileRepository;
import org.innovateuk.ifs.sil.grant.resource.Forecast;
import org.innovateuk.ifs.sil.grant.resource.Grant;
import org.innovateuk.ifs.sil.grant.resource.Participant;
import org.innovateuk.ifs.user.domain.User;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.*;
import static java.util.Arrays.asList;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.innovateuk.ifs.application.builder.ApplicationBuilder.newApplication;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess;
import static org.innovateuk.ifs.competition.builder.CompetitionBuilder.newCompetition;
import static org.innovateuk.ifs.competition.builder.InnovationLeadBuilder.newInnovationLead;
import static org.innovateuk.ifs.finance.builder.GrantClaimCostBuilder.newGrantClaimPercentage;
import static org.innovateuk.ifs.finance.builder.ExcludedCostCategoryBuilder.newExcludedCostCategory;
import static org.innovateuk.ifs.finance.builder.ProjectFinanceResourceBuilder.newProjectFinanceResource;
import static org.innovateuk.ifs.organisation.builder.OrganisationBuilder.newOrganisation;
import static org.innovateuk.ifs.project.core.builder.PartnerOrganisationBuilder.newPartnerOrganisation;
import static org.innovateuk.ifs.project.core.builder.ProjectBuilder.newProject;
import static org.innovateuk.ifs.project.core.builder.ProjectUserBuilder.newProjectUser;
import static org.innovateuk.ifs.project.core.domain.ProjectParticipantRole.*;
import static org.innovateuk.ifs.project.monitoring.builder.MonitoringOfficerBuilder.newMonitoringOfficer;
import static org.innovateuk.ifs.user.builder.UserBuilder.newUser;
import static org.innovateuk.ifs.util.CollectionFunctions.*;
import static org.innovateuk.ifs.util.MapFunctions.asMap;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.when;
/**
 * Parameterised mapping test: builds a {@code Project} fixture per {@link Parameter},
 * maps it through {@link GrantMapper}, and checks the resulting {@code Grant}'s
 * identifiers, dates, participants, and per-finance-contact forecasts.
 */
@RunWith(Parameterized.class)
public class GrantMapperTest {

    private static final LocalDate DEFAULT_START_DATE = LocalDate.of(2018, 1, 2);
    private static final ZonedDateTime DEFAULT_GOL_DATE = ZonedDateTime
            .of(LocalDate.of(2018, 3, 4), LocalTime.MIDNIGHT, ZoneId.of("GMT"));
    private static final String OVERHEADS = "Overheads";

    // When true, each mapped Grant is additionally serialised to JSON for manual inspection.
    private static final boolean OUTPUT_TEST_JSON = true;
    private static final String OUTPUT_DIRECTORY = "./build/tmp/grant-mapper-json";

    private final Parameter parameter;

    @Mock
    private FormInputResponseRepository formInputResponseRepository;

    @Mock
    private SpendProfileRepository spendProfileRepository;

    @Mock
    private InnovationLeadRepository innovationLeadRepository;

    @Mock
    private ProjectFinanceHandler projectFinanceHandler;

    @InjectMocks
    protected GrantMapper grantMapper = new GrantMapper();

    @Before
    public void setupMockInjection() {
        MockitoAnnotations.initMocks(this);
    }

    public GrantMapperTest(Parameter parameter) {
        this.parameter = parameter;
    }

    @Test
    public void mapToGrant() throws IOException {
        Project project = parameter.createProject();
        when(formInputResponseRepository.findOneByApplicationIdAndFormInputQuestionShortName(project.getApplication().getId(), "Project summary"))
                .thenReturn(parameter.projectSummaryResponse());
        when(formInputResponseRepository.findOneByApplicationIdAndFormInputQuestionShortName(project.getApplication().getId(), "Public description"))
                .thenReturn(parameter.publicDescriptionResponse());
        when(spendProfileRepository.findOneByProjectIdAndOrganisationId(any(), any()))
                .thenAnswer(i -> Optional.of(parameter.createSpendProfile()));
        Map<FinanceRowType, FinanceRowCostCategory> industrialOrganisationFinances = asMap(
                FinanceRowType.FINANCE, newExcludedCostCategory().withCosts(
                        newGrantClaimPercentage().
                                withGrantClaimPercentage(BigDecimal.valueOf(30)).
                                build(1)).
                        build());
        ProjectFinanceResource projectFinance = newProjectFinanceResource()
                .withFinanceOrganisationDetails(industrialOrganisationFinances)
                .withMaximumFundingLevel(50)
                .build();
        when(projectFinanceHandler.getProjectOrganisationFinances(any())).thenReturn(serviceSuccess(projectFinance));
        List<InnovationLead> innovationLeads = newInnovationLead().
                withUser(newUser().withEmailAddress("il1@example.com", "il2@example.com").buildArray(2, User.class)).
                build(2);
        when(innovationLeadRepository.getByCompetitionIdAndRole(project.getApplication().getCompetition().getId(),
                CompetitionParticipantRole.INNOVATION_LEAD)).thenReturn(innovationLeads);

        Grant grant = grantMapper.mapToGrant(project);

        if (OUTPUT_TEST_JSON) {
            File outDirectory = new File(OUTPUT_DIRECTORY);
            if (!outDirectory.exists()) {
                assertTrue(outDirectory.mkdir());
            }
            Files.write(Paths.get(OUTPUT_DIRECTORY + "/grant-" + parameter.name() + ".json"),
                    new ObjectMapper().writeValueAsString(grant).getBytes());
        }

        assertThat(grant.getId(), equalTo(parameter.applicationId()));
        assertThat(grant.getCompetitionCode(), equalTo(parameter.competitionId()));
        assertThat(grant.getPublicDescription(), equalTo(parameter.publicDescription()));
        assertThat(grant.getSummary(), equalTo(parameter.projectSummary()));
        assertThat(grant.getStartDate(), equalTo(DEFAULT_START_DATE));
        assertThat(grant.getGrantOfferLetterDate(), equalTo(DEFAULT_GOL_DATE));
        assertThat(grant.getSourceSystem(), equalTo("IFS"));
        // expect 1 Project Manager record, one Finance Contact record for each Organisation and 1 innovation lead record and 1 monitoring officer
        int expectedNumberOfParticipantRecords = 1 + (parameter.partnerOrganisationCount) + 1 + 1;
        assertThat(grant.getParticipants(), hasSize(expectedNumberOfParticipantRecords));
        Participant projectManagerParticipant = getOnlyElement(simpleFilter(grant.getParticipants(),
                participant -> "Project manager".equals(participant.getContactRole())));
        List<Participant> financeContactParticipants = simpleFilter(grant.getParticipants(),
                participant -> "Finance contact".equals(participant.getContactRole()));
        Participant innovationLeadParticipant = getOnlyElement(simpleFilter(grant.getParticipants(),
                participant -> "Innovation lead".equals(participant.getContactRole())));
        Participant monitoringOfficerParticipant = getOnlyElement(simpleFilter(grant.getParticipants(),
                participant -> "Monitoring officer".equals(participant.getContactRole())));
        assertThat(monitoringOfficerParticipant.getContactEmail(), equalTo("mo@example.com"));
        assertThat(projectManagerParticipant.getContactEmail(), equalTo("pm@example.com"));
        assertThat(innovationLeadParticipant.getContactEmail(), equalTo("il1@example.com"));
        forEachWithIndex(financeContactParticipants, (i, participant) -> {
            assertThat(participant.getForecasts().size(), equalTo(parameter.costCategoryCount()));
            Forecast overheads = participant.getForecasts().stream()
                    .filter(forecast -> OVERHEADS.equals(forecast.getCostCategory()))
                    .findFirst()
                    .orElseThrow(IllegalStateException::new);
            assertThat(overheads.getPeriods().size(), equalTo(parameter.duration()));
            if (parameter.expectedOverheads().size() > i) {
                assertThat(overheads.getCost(), equalTo(parameter.expectedOverheads().get(i)));
            }
            if (parameter.expectedOverheadRates().size() > i) {
                assertThat(participant.getOverheadRate().longValue(), equalTo(parameter.expectedOverheadRates().get(i)));
            }
        });
    }

    @Parameters
    public static Collection<Parameter> parameters() {
        return asList(
                newParameter("basic", newProject()),
                newParameter("single", newProject()).duration(1).expectedOverheads(10L)
        );
    }

    private static Parameter newParameter(String name, ProjectBuilder projectBuilder) {
        return new Parameter().name(name).projectBuilder(projectBuilder);
    }

    /**
     * Fluent test-fixture description: ids, durations, organisation/user counts and
     * expected overhead figures, plus factories for the domain objects under test.
     */
    private static class Parameter {
        private ProjectBuilder projectBuilder;
        private String name;
        private long competitionId = 2L;
        private long applicationId = 1L;
        private long projectId = 1L;
        private String projectSummary;
        private String publicDescription;
        private int duration = 12;
        private int partnerOrganisationCount = 3;
        private int costCategoryCount = 2;
        private int userCount = 3;
        private int value = 10;
        private List<Long> expectedOverheads = Collections.singletonList(120L);
        private List<Long> expectedOverheadRates = Collections.singletonList(50L);

        private Parameter projectBuilder(ProjectBuilder projectBuilder) {
            this.projectBuilder = projectBuilder;
            return this;
        }

        private ProjectBuilder projectBuilder() {
            return projectBuilder;
        }

        private Parameter applicationId(long applicationId) {
            this.applicationId = applicationId;
            return this;
        }

        private long applicationId() {
            return applicationId;
        }

        private Parameter competitionId(long competitionId) {
            this.competitionId = competitionId;
            return this;
        }

        private long competitionId() {
            return competitionId;
        }

        private Parameter name(String name) {
            this.name = name;
            return this;
        }

        private String name() {
            return name;
        }

        private Parameter participantCount(int participantCount) {
            this.partnerOrganisationCount = participantCount;
            return this;
        }

        private int participantCount() {
            return partnerOrganisationCount;
        }

        private Parameter costCategoryCount(int costCategoryCount) {
            this.costCategoryCount = costCategoryCount;
            return this;
        }

        private int costCategoryCount() {
            return costCategoryCount;
        }

        private Parameter duration(int duration) {
            this.duration = duration;
            return this;
        }

        private int duration() {
            return duration;
        }

        private Parameter expectedOverheads(Long... expectedOverheads) {
            this.expectedOverheads = asList(expectedOverheads);
            return this;
        }

        private List<Long> expectedOverheads() {
            return expectedOverheads;
        }

        private List<Long> expectedOverheadRates() {
            return expectedOverheadRates;
        }

        private Parameter expectedOverheadRates(Long... expectedOverheadRates) {
            this.expectedOverheadRates = asList(expectedOverheadRates);
            return this;
        }

        private String publicDescription() {
            return publicDescription == null ? name + " public description" : publicDescription;
        }

        private String projectSummary() {
            // BUG FIX: the original tested publicDescription for null here (copy-paste
            // from publicDescription()), so an explicitly-set projectSummary was
            // ignored whenever publicDescription was unset, and vice versa.
            return projectSummary == null ? name + " project summary" : projectSummary;
        }

        private FormInputResponse projectSummaryResponse() {
            FormInputResponse response = createFormInputResponse("Project summary");
            response.setValue(projectSummary());
            return response;
        }

        private FormInputResponse publicDescriptionResponse() {
            // NOTE(review): this deliberately(?) reuses the "Project summary"
            // description — the description is only set on the FormInput and is not
            // asserted on, so behaviour is unchanged; TODO confirm whether
            // "Public description" was intended.
            FormInputResponse response = createFormInputResponse("Project summary");
            response.setValue(publicDescription());
            return response;
        }

        private FormInputResponse createFormInputResponse(String description) {
            return createFormInputResponse(description, name + " " + description);
        }

        private FormInputResponse createFormInputResponse(String description, String value) {
            FormInputResponse formInputResponse = new FormInputResponse();
            FormInput formInput = new FormInput();
            formInput.setDescription(description);
            formInputResponse.setFormInput(formInput);
            formInputResponse.setValue(value);
            return formInputResponse;
        }

        /** Builds a spend profile with {@code costCategoryCount} categories x {@code duration} periods. */
        private SpendProfile createSpendProfile() {
            List<Cost> eligibleCosts = new ArrayList<>();
            List<Cost> spendProfileFigures = new ArrayList<>();
            for (int costCategoryIndex = 0 ; costCategoryIndex < costCategoryCount ; costCategoryIndex++ ) {
                for (int durationIndex = 0 ; durationIndex < duration ; durationIndex++ ) {
                    spendProfileFigures.add(new Cost(BigDecimal.valueOf(value))
                            .withTimePeriod(durationIndex, null, null, null)
                            .withCategory(new CostCategory(costCategoryIndex == 0
                                    ? OVERHEADS : "cost-" + costCategoryIndex)));
                }
            }
            return new SpendProfile(null, null, null, eligibleCosts, spendProfileFigures, null, null, true);
        }

        /** Builds the full Project fixture: partner organisations, users per role, and MO. */
        private Project createProject() {
            List<PartnerOrganisation> partnerOrganisations = newPartnerOrganisation()
                    .withOrganisation(newOrganisation().withOrganisationType(OrganisationTypeEnum.BUSINESS).build())
                    .withLeadOrganisation(true, false)
                    .withPostcode("123 ABC")
                    .build(partnerOrganisationCount);
            List<ProjectUser> leadOrganisationProjectUsers = newProjectUser().
                    withOrganisation(partnerOrganisations.get(0).getOrganisation()).
                    withRole(PROJECT_MANAGER, PROJECT_FINANCE_CONTACT, PROJECT_PARTNER).
                    withUser(combineLists(
                            newUser().withEmailAddress("pm@example.com", "fc1@example.com").build(2),
                            newUser().build(userCount - 2)).toArray(new User[] {}
                    )).
                    build(userCount);
            List<ProjectUser> org2ProjectUsers = newProjectUser().
                    withOrganisation(partnerOrganisations.get(1).getOrganisation()).
                    withRole(PROJECT_FINANCE_CONTACT, PROJECT_PARTNER).
                    withUser(combineLists(
                            newUser().withEmailAddress("fc2@example.com").build(1),
                            newUser().build(userCount - 1)).toArray(new User[] {}
                    )).
                    build(userCount);
            List<ProjectUser> org3ProjectUsers = newProjectUser().
                    withOrganisation(partnerOrganisations.get(2).getOrganisation()).
                    withRole(PROJECT_FINANCE_CONTACT, PROJECT_PARTNER).
                    withUser(combineLists(
                            newUser().withEmailAddress("fc3@example.com").build(1),
                            newUser().build(userCount - 1)).toArray(new User[] {}
                    )).
                    build(userCount);
            List<ProjectUser> projectUsers = combineLists(leadOrganisationProjectUsers, org2ProjectUsers, org3ProjectUsers);
            MonitoringOfficer projectMonitoringOfficer = newMonitoringOfficer()
                    .withUser(newUser().withEmailAddress("mo@example.com").build())
                    .build();
            return projectBuilder
                    .withDuration((long) duration)
                    .withId(projectId)
                    .withTargetStartDate(DEFAULT_START_DATE)
                    .withOfferSubmittedDate(DEFAULT_GOL_DATE)
                    .withPartnerOrganisations(partnerOrganisations)
                    .withApplication(
                            newApplication()
                                    .withId(applicationId)
                                    .withCompetition(
                                            newCompetition().withId(competitionId).build())
                                    .build())
                    .withProjectUsers(projectUsers)
                    .withProjectMonitoringOfficer(projectMonitoringOfficer)
                    .build();
        }
    }
}
|
import java.io.*;
import java.util.*;
public class Solution {
public static void main(String[] args) throws IOException {
StringBuffer sb = new StringBuffer();
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
//For each test
XorTree tree = new XorTree((byte)Short.SIZE);
for(int T = Integer.parseInt(br.readLine()); T > 0; T
//Get input
String[] temp = br.readLine().split(" ");
int N = Integer.parseInt(temp[0]);
int Q = Integer.parseInt(temp[1]);
short[] ar = new short[N];
temp = br.readLine().split(" ");
for(int i = 0; i < N; ++i){
ar[i] = Short.parseShort(temp[i]);
}
//Initialize xor tree
tree.clear();
for(int i = 0; i < N; ++i){
tree.add(i+1, ar[i]);
}
//For each query
for(int i = 0; i < Q; ++i){
//Get input
temp = br.readLine().split(" ");
short a = Short.parseShort(temp[0]);
int p = Integer.parseInt(temp[1]);
int q = Integer.parseInt(temp[2]);
//Get max xor value
short max = (short)tree.maxXor(a, p, q);
//Output
sb.append(max + "\n");
}
}
System.out.print(sb);
}
private static class XorTree{
private Node root;
private byte numBits;
private long[] bitMasks;
public XorTree(byte numBits){
numBits = numBits < 1 ? (byte)1 : numBits;
numBits = numBits > Long.SIZE ? (byte)Long.SIZE : numBits;
this.root = null;
this.numBits = numBits;
this.bitMasks = new long[numBits];
this.bitMasks[0] = 1;
for(byte i = 1; i < numBits; ++i){
bitMasks[i] = bitMasks[i-1] << 1L;
}
}
public void add(int index, long val){
this.root = this.root == null ? new Node() : this.root;
this.root.add(index);
Node node = this.root;
byte bit = this.numBits;
while (bit
node = ((val & this.bitMasks[bit]) == 0) ?
(node.zero = (node.zero == null) ? new Node() : node.zero):
(node.one = (node.one == null) ? new Node() : node.one );
node.add(index);
}
}
private long maxXor(long n, int minIndex, int maxIndex){
if (this.root == null || !this.root.hasIndexInRange(minIndex, maxIndex)){
return -1L;
}
long xor = 0L;
Node node = this.root;
byte bit = this.numBits;
while (bit
long mask = this.bitMasks[bit];
if ((n & mask) == 0){
if (node.one != null && (node.zero == null || node.one.hasIndexInRange(minIndex, maxIndex))){
xor += mask;
node = node.one;
} else {
node = node.zero;
}
} else if (node.zero != null && (node.one == null || node.zero.hasIndexInRange(minIndex, maxIndex))){
xor += mask;
node = node.zero;
} else {
node = node.one;
}
}
return xor;
}
public void clear(){
this.root = null;
}
public static class Node{
public Node one;
public Node zero;
private List<Integer> indices;
public Node(){
this.one = null;
this.zero = null;
this.indices = new ArrayList<Integer>();
}
public void add(int index){
int size = this.indices.size();
if (size < 1 || this.indices.get(size-1) < index){
this.indices.add(index);
} else {
int i = binarySearch(this.indices, index, 0, size);
if (i < 0){
this.indices.add(-i - 1, index);
}
}
}
public boolean hasIndexInRange(int minIndex, int maxIndex){
int size = indices.size();
minIndex = binarySearch(this.indices, minIndex, 0, size);
if (minIndex >= 0){
return true;
}
maxIndex = binarySearch(this.indices, maxIndex, -minIndex - 1, size);
return (maxIndex < 0) ? minIndex != maxIndex : true;
}
private int binarySearch(List<Integer> list, int val, int min, int max){
while (min < max){
int mid = min + (max - min)/2;
int midVal = list.get(mid);
if (val == midVal){
return mid;
}
if (val < midVal){
max = mid;
} else {
min = mid + 1;
}
}
return -min - 1;
}
}
}
}
|
package com.timgroup.eventstore.mysql;
import com.mchange.v2.c3p0.ComboPooledDataSource;
import com.mchange.v2.c3p0.PooledDataSource;
import com.typesafe.config.Config;
import java.util.Properties;
import static java.lang.String.format;
/**
 * Builds c3p0 pooled data sources from either a {@link Properties} object
 * (trying the raw prefix first, then the "db.&lt;prefix&gt;." form) or a
 * Typesafe {@link Config}.
 */
public final class StacksConfiguredDataSource {

    private StacksConfiguredDataSource() { /* prevent instantiation */ }

    public static PooledDataSource pooledMasterDb(Properties properties, String configPrefix) {
        String prefix = configPrefix;
        if (properties.getProperty(prefix + "hostname") == null) {
            prefix = "db." + prefix + ".";
            // FIX: the original probed getProperty(prefix) — the bare prefix is
            // never a stored key, so the fallback could not be detected as missing.
            // Probe the same key that is read below.
            if (properties.getProperty(prefix + "hostname") == null) {
                // FIX: the message previously embedded a stray "+" inside the literal.
                throw new IllegalArgumentException("unable to read configuration for data source with prefix " + configPrefix);
            }
        }
        return pooled(
                properties.getProperty(prefix + "hostname"),
                Integer.parseInt(properties.getProperty(prefix + "port")),
                properties.getProperty(prefix + "username"),
                properties.getProperty(prefix + "password"),
                properties.getProperty(prefix + "database"),
                properties.getProperty(prefix + "driver")
        );
    }

    public static PooledDataSource pooledReadOnlyDb(Properties properties, String configPrefix) {
        String prefix = configPrefix;
        if (properties.getProperty(prefix + "read_only_cluster") == null) {
            prefix = "db." + prefix + ".";
            // FIX: probe the key actually read below (see pooledMasterDb).
            if (properties.getProperty(prefix + "read_only_cluster") == null) {
                throw new IllegalArgumentException("unable to read configuration for data source with prefix " + configPrefix);
            }
        }
        return pooled(
                properties.getProperty(prefix + "read_only_cluster"),
                Integer.parseInt(properties.getProperty(prefix + "port")),
                properties.getProperty(prefix + "username"),
                properties.getProperty(prefix + "password"),
                properties.getProperty(prefix + "database"),
                properties.getProperty(prefix + "driver")
        );
    }

    public static PooledDataSource pooledMasterDb(Config config) {
        return pooled(
                config.getString("hostname"),
                config.getInt("port"),
                config.getString("username"),
                config.getString("password"),
                config.getString("database"),
                config.getString("driver")
        );
    }

    public static PooledDataSource pooledReadOnlyDb(Config config) {
        return pooled(
                config.getString("read_only_cluster"),
                config.getInt("port"),
                config.getString("username"),
                config.getString("password"),
                config.getString("database"),
                config.getString("driver")
        );
    }

    /** Loads the JDBC driver and configures a small c3p0 pool for the given endpoint. */
    private static PooledDataSource pooled(String hostname, int port, String username, String password, String database, String driver) {
        ComboPooledDataSource dataSource = new ComboPooledDataSource();
        dataSource.setJdbcUrl(format("jdbc:mysql://%s:%d/%s?rewriteBatchedStatements=true",
                hostname,
                port,
                database));
        dataSource.setUser(username);
        dataSource.setPassword(password);
        dataSource.setIdleConnectionTestPeriod(60 * 5);
        dataSource.setMinPoolSize(1);
        dataSource.setInitialPoolSize(1);
        dataSource.setAcquireIncrement(1);
        try {
            Class.forName(driver);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        return dataSource;
    }
}
|
package oasis.web.applications;
import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import com.google.common.base.Strings;
import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import oasis.model.InvalidVersionException;
import oasis.model.applications.v2.AccessControlEntry;
import oasis.model.applications.v2.AccessControlRepository;
import oasis.model.applications.v2.AppInstanceRepository;
import oasis.services.authz.AppAdminHelper;
import oasis.services.etag.EtagService;
import oasis.web.authn.Authenticated;
import oasis.web.authn.OAuth;
import oasis.web.authn.OAuthPrincipal;
import oasis.web.utils.ResponseFactory;
/**
 * REST endpoint for a single Access Control Entry: retrieval and (etag-guarded)
 * revocation, both restricted to app admins of the owning application instance.
 */
@Path("/apps/acl/ace/{ace_id}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Authenticated @OAuth
@Api(value = "acl-ace", description = "Access Control Entry")
public class AccessControlEntryEndpoint {
  @Inject AccessControlRepository accessControlRepository;
  @Inject AppInstanceRepository appInstanceRepository;
  @Inject AppAdminHelper appAdminHelper;
  @Inject EtagService etagService;

  @Context SecurityContext securityContext;

  @PathParam("ace_id") String ace_id;

  @GET
  @ApiOperation(
      value = "Retrieves an ACE",
      response = AccessControlEntry.class
  )
  public Response get() {
    final AccessControlEntry entry = accessControlRepository.getAccessControlEntry(ace_id);
    if (entry == null) {
      return ResponseFactory.NOT_FOUND;
    }
    if (!currentUserIsAdminOf(entry)) {
      return ResponseFactory.forbidden("Current user is not an app_admin for the application instance");
    }
    return Response.ok()
        .tag(etagService.getEtag(entry))
        .entity(entry)
        .build();
  }

  @DELETE
  @ApiOperation("Deletes an ACE")
  public Response revoke(@HeaderParam(HttpHeaders.IF_MATCH) String ifMatch) {
    // Revocation is conditional: an If-Match etag is mandatory.
    if (Strings.isNullOrEmpty(ifMatch)) {
      return ResponseFactory.preconditionRequiredIfMatch();
    }
    final AccessControlEntry entry = accessControlRepository.getAccessControlEntry(ace_id);
    if (entry == null) {
      return ResponseFactory.NOT_FOUND;
    }
    if (!currentUserIsAdminOf(entry)) {
      return ResponseFactory.forbidden("Current user is not an app_admin for the application instance");
    }
    final boolean deleted;
    try {
      deleted = accessControlRepository.deleteAccessControlEntry(ace_id, etagService.parseEtag(ifMatch));
    } catch (InvalidVersionException ive) {
      return ResponseFactory.preconditionFailed(ive.getMessage());
    }
    // A failed delete here means the entry vanished between lookup and delete.
    return deleted ? ResponseFactory.NO_CONTENT : ResponseFactory.NOT_FOUND;
  }

  /** True when the authenticated account administers the instance owning the entry. */
  private boolean currentUserIsAdminOf(AccessControlEntry entry) {
    final String accountId =
        ((OAuthPrincipal) securityContext.getUserPrincipal()).getAccessToken().getAccountId();
    return isAppAdmin(accountId, entry.getInstance_id());
  }

  private boolean isAppAdmin(String userId, String instanceId) {
    return appAdminHelper.isAdmin(userId, appInstanceRepository.getAppInstance(instanceId));
  }
}
|
package roslab.processors.electronics;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import roslab.model.electronics.Circuit;
import roslab.model.electronics.Pin;
/**
* Parse EAGLE Schematics (XML)
*
* @author Peter Gebhard
*/
public class EagleSchematic {
static Logger logger = LoggerFactory.getLogger(EagleSchematic.class);
private File schematic = null;
// Map original net names to connected net names (map value is null if that
// net is unconnected)
private Map<String, String> nets = new HashMap<String, String>();
private List<String> requiredNets = new ArrayList<String>();
/**
* Construct an EAGLE Schematic object from the input file.
*
* @param schematic
*/
public EagleSchematic(File schematic) {
this.schematic = schematic;
try {
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document doc = dBuilder.parse(this.schematic);
// optional, but recommended
// read this -
doc.getDocumentElement().normalize();
System.out.println("Schematic : " + this.schematic.getName());
NodeList nList = doc.getElementsByTagName("net");
for (int temp = 0; temp < nList.getLength(); temp++) {
Element nNode = (Element) nList.item(temp);
// Find required pins
NodeList pinrefList = nNode.getElementsByTagName("pinref");
for (int pr = 0; pr < pinrefList.getLength(); pr++) {
Element prNode = (Element) pinrefList.item(pr);
if (prNode.getAttribute("pin").equals("BLOCK_REQUIREMENT")) {
requiredNets.add(nNode.getAttribute("name"));
logger.debug(" Required Net: " + nNode.getAttribute("name"));
}
}
nets.put(nNode.getAttribute("name"), null);
logger.debug(" Net: " + nNode.getAttribute("name"));
}
}
catch (Exception e) {
e.printStackTrace();
}
}
/**
* @return the schematic's name
*/
public String getName() {
// Return the schematic filename without the '.sch' file extension
return schematic.getName().substring(0, schematic.getName().lastIndexOf('.'));
}
/**
* @return the schematic
*/
public File getSchematicFile() {
return schematic;
}
/**
* @return the nets
*/
public Map<String, String> getNets() {
return nets;
}
/**
* @param net
* Name of the net to be renamed.
* @param newName
* Name to which the net should be renamed.
*/
private void setNet(String net, String newName) {
nets.put(net, newName);
}
private List<String> getRequiredNets() {
return requiredNets;
}
/**
* @param net
* Name of the net whose verification data is returned.
*/
public Map<String, String> getVerificationData(String net) {
// TODO Extract verification data from schematic, return in mapping of
// key-value pairs (ie. <"voltage", "5">, <"current", "0.5"> )
return null;
}
/**
* @param output
* File where the schematic will be saved.
*/
public void save(File output) {
// TODO Save schematic to given File object.
}
/**
* @param schematics
* List of schematics that will be merged into one.
*/
public static EagleSchematic merge(List<EagleSchematic> schematics, String filename) {
if (schematics.size() < 2) {
throw new IllegalArgumentException("Cannot merge schematics if the input contains less than 2.");
}
File mergedSch = new File(schematics.get(0).getSchematicFile().getParent() + File.separatorChar + filename);
// Find which schematic file is the largest in line count; assuming that
// one is the most important, choose it as the one to accept merges from
// the other schematics.
long largest = schematics.get(0).schematic.length();
File largestSch = schematics.get(0).schematic;
for (EagleSchematic sch : schematics) {
if (sch.schematic.length() > largest) {
largest = sch.schematic.length();
largestSch = sch.schematic;
}
}
// Copy contents of largest schematic to our merged output schematic
// file.
try {
Files.copy(largestSch.toPath(), mergedSch.toPath(), StandardCopyOption.REPLACE_EXISTING);
}
catch (IOException e1) {
e1.printStackTrace();
}
// Parse contents of the schematic where we want to merge the others
// (and parse the others that are to be merged)
try {
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document mDoc = dBuilder.parse(mergedSch);
mDoc.getDocumentElement().normalize();
Node mDocLayersNode = null;
Element mDocLibrariesNode = null;
Node mDocPackagesNode = null;
Node mDocSymbolsNode = null;
Node mDocDeviceSetsNode = null;
Node mDocPartsNode = null;
Node mDocSheetsNode = null;
NodeList mDocLayerList = mDoc.getElementsByTagName("layer");
if (mDocLayerList.getLength() > 0) {
mDocLayersNode = mDocLayerList.item(0).getParentNode();
}
else {
mDocLayersNode = mDoc.createElement("layers");
}
NodeList mDocLibraryList = mDoc.getElementsByTagName("libraries");
if (mDocLibraryList.getLength() > 0) {
mDocLibrariesNode = (Element) mDocLibraryList.item(0);
}
else {
mDocLibrariesNode = mDoc.createElement("libraries");
}
NodeList mDocLibraryList2 = mDoc.getElementsByTagName("library");
Element nTest = (Element) mDocLibraryList2.item(1).getParentNode();
NodeList mDocPartList = mDoc.getElementsByTagName("part");
if (mDocPartList.getLength() > 0) {
mDocPartsNode = mDocPartList.item(0).getParentNode();
}
else {
mDocPartsNode = mDoc.createElement("parts");
}
// Do merging for each schematic
for (EagleSchematic sch : schematics) {
// Skip merging of the largest schematic, since the merging
// output schematic is the same thing (we chose to start with
// the largest schematic)!
if (sch.schematic.equals(largestSch)) {
// Handle renaming of nets for merge schematic
NodeList mDocNetList = mDoc.getElementsByTagName("net");
for (int p = 0; p < mDocNetList.getLength(); p++) {
Element mDocNet = (Element) mDocNetList.item(p);
logger.debug("Net mDoc: " + mDocNet.getAttribute("name"));
String newNetName = sch.nets.get(mDocNet.getAttribute("name"));
if (newNetName != null) {
mDocNet.setAttribute("name", newNetName);
}
}
continue;
}
// Parse the schematic into a Document
Document doc = dBuilder.parse(sch.schematic);
doc.getDocumentElement().normalize();
logger.debug("Schematic: " + sch.getName());
// An 'exists' flag used when checking if an Element already
// exists in the merge document
boolean exists = false;
// Handle layers
NodeList docLayerList = doc.getElementsByTagName("layer");
for (int i = 0; i < docLayerList.getLength(); i++) {
Element docLayer = (Element) docLayerList.item(i);
logger.debug("Layer doc: " + docLayer.getAttribute("name"));
// Check if layer exists already in merge document
for (int j = 0; j < mDocLayerList.getLength(); j++) {
Element mDocLayer = (Element) mDocLayerList.item(j);
logger.debug("Layer mDoc: " + mDocLayer.getAttribute("name"));
if (mDocLayer.getAttribute("name").equals(docLayer.getAttribute("name"))) {
exists = true;
logger.debug("Layer EXISTS");
break;
}
}
// If the layer does not exist in the merge document, add it
if (!exists) {
logger.debug("Layer doc: " + docLayer.getAttribute("name"));
int newNum = Integer.parseInt(docLayer.getAttribute("number"));
while (!isUniqueLayerNum(mDocLayerList, newNum) || !isUniqueLayerNum(docLayerList, newNum)) {
newNum++;
}
docLayer.setAttribute("number", Integer.toString(newNum));
mDocLayersNode.appendChild(mDoc.importNode(docLayer, true));
}
// Reset 'exists' flag
exists = false;
}
// Handle libraries
NodeList docLibraryList = doc.getElementsByTagName("library");
for (int i = 0; i < docLibraryList.getLength(); i++) {
Element docLibrary = (Element) docLibraryList.item(i);
logger.debug("Library doc: [" + i + " / " + docLibraryList.getLength() + " ] " + docLibrary.getAttribute("name"));
// Check if library exists already in merge document
for (int j = 0; j < mDocLibraryList2.getLength(); j++) {
Element mDocLibrary = (Element) mDocLibraryList2.item(j);
logger.debug("Library mDoc: [" + j + " / " + mDocLibraryList2.getLength() + " ] " + mDocLibrary.getAttribute("name"));
if (mDocLibrary.getAttribute("name").equals(docLibrary.getAttribute("name"))) {
exists = true;
// Handle merging of packages in similar libraries
boolean packageExists = false;
NodeList mDocPackageList = mDocLibrary.getElementsByTagName("package");
if (mDocPackageList.getLength() > 0) {
mDocPackagesNode = mDocPackageList.item(0).getParentNode();
}
else {
mDocPackagesNode = mDoc.createElement("packages");
}
NodeList docPackageList = docLibrary.getElementsByTagName("package");
for (int k = 0; k < docPackageList.getLength(); k++) {
Element docPackage = (Element) docPackageList.item(k);
logger.debug("Package doc: " + docPackage.getAttribute("name"));
for (int m = 0; m < mDocPackageList.getLength(); m++) {
Element mDocPackage = (Element) mDocPackageList.item(m);
logger.debug("Package mDoc: " + mDocPackage.getAttribute("name"));
if (mDocPackage.getAttribute("name").equals(docPackage.getAttribute("name"))) {
logger.debug("Package doc DOES exist: " + docPackage.getAttribute("name"));
packageExists = true;
break;
}
}
// If the package does not exist in the
// merge document's library, add it
if (!packageExists) {
logger.debug("Package doc does NOT exist: " + docPackage.getAttribute("name"));
mDocPackagesNode.appendChild(mDoc.importNode(docPackage, true));
mDocLibrary.appendChild(mDocPackagesNode);
mDocLibrariesNode.appendChild(mDocLibrary);
}
// Reset 'packageExists' flag
packageExists = false;
}
// Handle merging of symbols in similar libraries
boolean symbolExists = false;
NodeList mDocSymbolList = mDocLibrary.getElementsByTagName("symbol");
if (mDocSymbolList.getLength() > 0) {
mDocSymbolsNode = mDocSymbolList.item(0).getParentNode();
}
else {
mDocSymbolsNode = mDoc.createElement("symbols");
}
NodeList docSymbolList = docLibrary.getElementsByTagName("symbol");
for (int k = 0; k < docSymbolList.getLength(); k++) {
Element docSymbol = (Element) docSymbolList.item(k);
logger.debug("Symbol doc: " + docSymbol.getAttribute("name"));
for (int m = 0; m < mDocSymbolList.getLength(); m++) {
Element mDocSymbol = (Element) mDocSymbolList.item(m);
logger.debug("Symbol mDoc: " + mDocSymbol.getAttribute("name"));
if (mDocSymbol.getAttribute("name").equals(docSymbol.getAttribute("name"))) {
symbolExists = true;
break;
}
}
// If the symbol does not exist in the merge
// document's library, add it
if (!symbolExists) {
logger.debug("Symbol doc does NOT exist: " + docSymbol.getAttribute("name"));
mDocSymbolsNode.appendChild(mDoc.importNode(docSymbol, true));
}
// Reset 'symbolExists' flag
symbolExists = false;
}
// Handle merging of devicesets in similar libraries
boolean devsetExists = false;
NodeList mDocDeviceSetList = mDocLibrary.getElementsByTagName("deviceset");
if (mDocDeviceSetList.getLength() > 0) {
mDocDeviceSetsNode = mDocDeviceSetList.item(0).getParentNode();
}
else {
mDocDeviceSetsNode = mDoc.createElement("devicesets");
}
NodeList docDeviceSetList = docLibrary.getElementsByTagName("deviceset");
for (int k = 0; k < docDeviceSetList.getLength(); k++) {
logger.debug("DeviceSet doc: " + ((Element) docDeviceSetList.item(k)).getAttribute("name"));
for (int m = 0; m < mDocDeviceSetList.getLength(); m++) {
logger.debug("DeviceSet mDoc: " + ((Element) mDocDeviceSetList.item(m)).getAttribute("name"));
if (((Element) mDocDeviceSetList.item(m)).getAttribute("name").equals(
((Element) docDeviceSetList.item(k)).getAttribute("name"))) {
devsetExists = true;
break;
}
}
// If the symbol does not exist in the merge
// document's library, add it
if (!devsetExists) {
logger.debug("DeviceSet doc does NOT exist: " + ((Element) docDeviceSetList.item(k)).getAttribute("name"));
mDocDeviceSetsNode.appendChild(mDoc.importNode(docDeviceSetList.item(k), true));
}
// Reset 'devsetExists' flag
devsetExists = false;
}
// Break out of the loop if we found the library
// already exists.
break;
}
}
// If the library does not exist in the merge document,
// add it
if (!exists) {
mDocLibrariesNode.appendChild(mDoc.importNode(docLibraryList.item(i), true));
}
// Reset 'exists' flag
exists = false;
}
// Handle merging of parts
NodeList docPartList = doc.getElementsByTagName("part");
for (int p = 0; p < docPartList.getLength(); p++) {
Element docPart = (Element) docPartList.item(p);
logger.debug("Part doc: " + docPart.getAttribute("name"));
for (int m = 0; m < mDocPartList.getLength(); m++) {
Element mDocPart = (Element) mDocPartList.item(m);
logger.debug("Part mDoc: " + mDocPart.getAttribute("name"));
if (mDocPart.getAttribute("name").equals(docPart.getAttribute("name"))) {
// If there is a name conflict, pick a unique name
// for part. Try again if there is still a conflict
// with the new name.
String newName = makeUniqueName(docPart.getAttribute("name"));
while (!isUniqueName(mDocPartList, newName) || !isUniqueName(docPartList, newName)) {
newName = makeUniqueName(newName);
}
// Update all references to the old part name in
// pinref and instance tags
NodeList docPinrefList = doc.getElementsByTagName("pinref");
for (int pinref = 0; pinref < docPinrefList.getLength(); pinref++) {
Element docPinref = (Element) docPinrefList.item(pinref);
if (docPinref.getAttribute("part").equals(docPart.getAttribute("name"))) {
logger.debug("Pinref doc: " + docPinref.getAttribute("part"));
docPinref.setAttribute("part", newName);
}
}
NodeList docInstanceList = doc.getElementsByTagName("instance");
for (int inst = 0; inst < docInstanceList.getLength(); inst++) {
Element docInst = (Element) docInstanceList.item(inst);
if (docInst.getAttribute("part").equals(docPart.getAttribute("name"))) {
logger.debug("Instance doc: " + docInst.getAttribute("part"));
docInst.setAttribute("part", newName);
}
}
docPart.setAttribute("name", newName);
}
}
// Merge the part from the original document to the new
// merge document
mDocPartsNode.appendChild(mDoc.importNode(docPart, true));
}
// Handle net renaming
// TODO Handle case where net names in source schematic and
// destination schematic have not been renamed (ie. pins have
// not been connected), but the two nets have matching names. In
// this case, one of the nets needs to be renamed to something
// unique to prevent an "accidental" connection.
NodeList docNetList = doc.getElementsByTagName("net");
for (int p = 0; p < docNetList.getLength(); p++) {
Element docNet = (Element) docNetList.item(p);
logger.debug("Net doc: " + docNet.getAttribute("name"));
String newNetName = sch.nets.get(docNet.getAttribute("name"));
if (newNetName != null) {
docNet.setAttribute("name", newNetName);
}
}
// Handle merging of sheets
NodeList mDocSheetList = mDoc.getElementsByTagName("sheet");
if (mDocSheetList.getLength() > 0) {
mDocSheetsNode = mDocSheetList.item(0).getParentNode();
}
else {
mDocSheetsNode = mDoc.createElement("sheets");
}
NodeList docSheetList = doc.getElementsByTagName("sheet");
for (int p = 0; p < docSheetList.getLength(); p++) {
logger.debug("Sheet doc: " + ((Element) docSheetList.item(p)).getAttribute("name"));
mDocSheetsNode.appendChild(mDoc.importNode(docSheetList.item(p), true));
}
}
TransformerFactory factory = TransformerFactory.newInstance();
Transformer transformer = factory.newTransformer();
Properties outFormat = new Properties();
outFormat.setProperty(OutputKeys.INDENT, "yes");
outFormat.setProperty(OutputKeys.METHOD, "xml");
outFormat.setProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
outFormat.setProperty(OutputKeys.VERSION, "1.0");
outFormat.setProperty(OutputKeys.ENCODING, "UTF-8");
transformer.setOutputProperties(outFormat);
DOMSource domSource = new DOMSource(mDoc.getDocumentElement());
StreamResult result = new StreamResult(new FileOutputStream("test.sch"));
transformer.transform(domSource, result);
}
catch (Exception e) {
e.printStackTrace();
}
return new EagleSchematic(mergedSch);
}
private static boolean isUniqueName(NodeList mDocPartList, String newName) {
for (int m = 0; m < mDocPartList.getLength(); m++) {
if (((Element) mDocPartList.item(m)).getAttribute("name").equals(newName)) {
return false;
}
}
return true;
}
// Find the number at the end of the input and increment it.
// If there is no number, append '1' to the input.
private static String makeUniqueName(String name) {
// Try to find the entire number at the end of the string
int i = name.length();
while (i > 0 && Character.isDigit(name.charAt(i - 1))) {
i
}
// Handle case where there is no number at the end of the input string
if (i == name.length()) {
return name.concat("1");
}
// Get integer from ending number in the string
int num = Integer.parseInt(name.substring(i));
// Increment the ending number
num++;
// Replace the ending number with the incremented value
return name.substring(0, i).concat(String.valueOf(num));
}
private static boolean isUniqueLayerNum(NodeList mDocList, int newNum) {
for (int m = 0; m < mDocList.getLength(); m++) {
if (((Element) mDocList.item(m)).getAttribute("number").equals(newNum)) {
return false;
}
}
return true;
}
/**
* @param schematicNetMap
* Map of schematics to the net (in that schematic) which is to
* be renamed.
* @param newName
* Name to which the nets in the map should be renamed.
*/
public static void connect(Map<EagleSchematic, String> schematicNetMap, String newName) {
for (Entry<EagleSchematic, String> e : schematicNetMap.entrySet()) {
e.getKey().setNet(e.getValue(), newName);
}
}
public static Circuit buildCircuitFromSchematic(EagleSchematic sch) {
Circuit c = new Circuit(sch.getName());
c.setSchematic(sch);
for (String net : sch.getNets().keySet()) {
Pin p = Pin.getPinFromString(net, c);
p.setNet(net);
p.setRequired(sch.getRequiredNets().contains(net));
c.addPin(p);
}
return c;
}
/*
* (non-Javadoc)
* @see java.lang.Object#clone()
*/
@Override
public EagleSchematic clone() {
return new EagleSchematic(schematic);
}
}
|
package org.smeup.sys.db.core.base;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.datatools.sqltools.parsers.sql.query.SQLQueryParseResult;
import org.smeup.sys.db.core.QCatalogContainer;
import org.smeup.sys.db.core.QCatalogGenerationStrategy;
import org.smeup.sys.db.core.QCatalogMetaData;
import org.smeup.sys.db.core.QConnection;
import org.smeup.sys.db.core.QDatabaseContainer;
import org.smeup.sys.db.core.QStatement;
import org.smeup.sys.db.syntax.QAliasResolver;
import org.smeup.sys.db.syntax.QQueryParser;
import org.smeup.sys.il.core.ctx.QContext;
public class BaseConnectionImpl implements QConnection {

    private QContext context;
    private QDatabaseContainer databaseContainer;
    // BUGFIX: volatile is required for the double-checked locking in
    // getCatalogMetaData(); without it another thread could observe a
    // partially-constructed BaseCatalogMetaDataConnectionImpl.
    private volatile QCatalogMetaData catalogMetaData;
    private QQueryParser queryParser;
    private QAliasResolver aliasResolver;

    // Catalog selected through setCatalog(); null means "use the default catalog".
    private String virtualCatalog;
    private BaseCatalogConnection currentCatalogConnection;
    // All catalog connections opened over this connection's lifetime; closed together.
    private List<BaseCatalogConnection> catalogConnections;

    public BaseConnectionImpl(QDatabaseContainer databaseContainer, QContext context) {
        this.context = context;
        this.databaseContainer = databaseContainer;
        this.queryParser = context.get(QQueryParser.class);
        this.catalogConnections = new ArrayList<BaseCatalogConnection>();
    }

    /**
     * Closes every catalog connection opened so far, resets the catalog state
     * and closes the owning context.
     */
    @Override
    public void close() throws SQLException {
        for (BaseCatalogConnection catalogConnection : catalogConnections)
            catalogConnection.close();
        this.catalogConnections.clear();
        this.currentCatalogConnection = null;
        this.virtualCatalog = null;
        this.context.close();
    }

    @Override
    public BaseStatementImpl createStatement() throws SQLException {
        return createStatement(false);
    }

    @Override
    public BaseStatementImpl createStatement(boolean native_) throws SQLException {
        return createStatement(native_, false);
    }

    /**
     * Creates a scroll-sensitive statement; updatable ones use CONCUR_UPDATABLE,
     * read-only ones CONCUR_READ_ONLY. The raw statement is owned by the returned
     * wrapper, hence the suppressed resource warning.
     */
    @SuppressWarnings("resource")
    @Override
    public BaseStatementImpl createStatement(boolean native_, boolean updatable) throws SQLException {
        Statement sqlStatement = null;
        if (updatable)
            // connection.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
            sqlStatement = getRawConnection().createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE);
        else
            sqlStatement = getRawConnection().createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY);
        BaseStatementImpl statement = new BaseStatementImpl(this, sqlStatement, native_);
        return statement;
    }

    @Override
    public String getCatalog() throws SQLException {
        return virtualCatalog;
    }

    /**
     * Resolves (and caches) the catalog connection matching the current virtual
     * catalog, opening a new one on first use.
     * NOTE(review): when a named catalog matches no configured container this
     * returns null, and callers such as getRawConnection() will then NPE.
     */
    private BaseCatalogConnection getCatalogConnection() throws SQLException {
        if (currentCatalogConnection != null)
            return currentCatalogConnection;
        if (getCatalog() == null) {
            QCatalogContainer catalogContainer = this.databaseContainer.getDefaultCatalogContainer();
            // search on connected catalog
            for (BaseCatalogConnection catalogConnection : catalogConnections)
                if (catalogConnection.getCatalogContainer().equals(catalogContainer))
                    return catalogConnection;
            currentCatalogConnection = new BaseCatalogConnection(catalogContainer);
            catalogConnections.add(currentCatalogConnection);
        } else {
            // search on connected catalog
            for (BaseCatalogConnection catalogConnection : catalogConnections)
                if (getCatalog().equals(catalogConnection.getCatalogContainer().getName()))
                    return catalogConnection;
            for (QCatalogContainer catalogContainer : this.databaseContainer.getCatalogContainers())
                if (getCatalog().equals(catalogContainer.getName())) {
                    currentCatalogConnection = new BaseCatalogConnection(catalogContainer);
                    catalogConnections.add(currentCatalogConnection);
                    break;
                }
        }
        return currentCatalogConnection;
    }

    /** @return the current catalog's generation strategy, or null on SQL failure. */
    @Override
    public QCatalogGenerationStrategy getCatalogGenerationStrategy() {
        try {
            return getCatalogConnection().getCatalogGenerationStrategy();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Lazily builds the catalog metadata wrapper. Uses double-checked locking
     * (safe here: the backing field is volatile). Returns null when the
     * underlying catalog connection fails.
     */
    @Override
    public QCatalogMetaData getCatalogMetaData() {
        if (this.catalogMetaData == null) {
            synchronized (this) {
                if (this.catalogMetaData == null) {
                    try {
                        catalogMetaData = new BaseCatalogMetaDataConnectionImpl(this, getCatalogConnection().getCatalogMetaData());
                    } catch (SQLException e) {
                        e.printStackTrace();
                        return null;
                    }
                }
            }
        }
        return catalogMetaData;
    }

    @Override
    public QContext getContext() {
        return context;
    }

    /** @return the JDBC connection backing the current catalog connection. */
    private Connection getRawConnection() throws SQLException {
        return getCatalogConnection().getRawConnection();
    }

    @Override
    public BasePreparedStatementImpl prepareStatement(String sql) throws SQLException {
        return prepareStatement(sql, false);
    }

    @Override
    public BasePreparedStatementImpl prepareStatement(String sql, boolean native_) throws SQLException {
        return prepareStatement(sql, native_, false);
    }

    /**
     * Prepares a statement. Non-native SQL is first rewritten through
     * {@link #translate(String)}; updatable statements request generated keys,
     * read-only ones a scroll-insensitive read-only result set. The raw
     * statement is owned by the returned wrapper (suppressed resource warning).
     */
    @SuppressWarnings("resource")
    @Override
    public BasePreparedStatementImpl prepareStatement(String sql, boolean native_, boolean updatable) throws SQLException {
        if (!native_)
            sql = translate(sql);
        PreparedStatement sqlPreparedStatement = null;
        if (updatable)
            sqlPreparedStatement = getRawConnection().prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
        else
            sqlPreparedStatement = getRawConnection().prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        BasePreparedStatementImpl statement = new BasePreparedStatementImpl(this, sqlPreparedStatement, native_);
        return statement;
    }

    /**
     * Switches the virtual catalog. A no-op when the same non-null catalog is
     * already selected; otherwise the cached catalog connection is invalidated
     * and eagerly re-resolved.
     */
    @Override
    public void setCatalog(String catalog) throws SQLException {
        if (catalog != null && catalog.equals(virtualCatalog))
            return;
        virtualCatalog = catalog;
        currentCatalogConnection = null;
        getRawConnection();
    }

    /**
     * Rewrites the given SQL for the current catalog: parses it, lets the
     * (optional) alias resolver adjust the query, then re-serializes it with
     * the catalog's query writer. Any failure surfaces as SQLException
     * (SQLState "X", vendor code -84) with the original cause attached.
     */
    @Override
    public String translate(String sql) throws SQLException {
        try {
            SQLQueryParseResult query = queryParser.parseQuery(sql);
            BaseCatalogConnection connection = getCatalogConnection();
            if (aliasResolver == null)
                aliasResolver = context.get(QAliasResolver.class);
            if (aliasResolver != null)
                aliasResolver.resolveQuery(this, query);
            sql = connection.getQueryWriter().writeQuery(query.getQueryStatement());
        } catch (Exception e) {
            throw new SQLException("Invalid query", "X", -84, e);
        }
        return sql;
    }

    /** Best-effort close of a statement: failures are deliberately ignored. */
    @Override
    public void close(QStatement stmt) {
        try {
            if (stmt != null) {
                stmt.close();
            }
        } catch (Exception ignored) {
            // intentionally swallowed: close() helpers are best-effort cleanup
        }
    }

    /** Best-effort close of a result set: failures are deliberately ignored. */
    @Override
    public void close(ResultSet rs) {
        try {
            if (rs != null) {
                rs.close();
            }
        } catch (Exception ignored) {
            // intentionally swallowed: close() helpers are best-effort cleanup
        }
    }
}
|
package org.broadinstitute.sting.playground.gatk.walkers.Recalibration;
import org.broadinstitute.sting.gatk.walkers.*;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.refdata.ReferenceOrderedDatum;
import org.broadinstitute.sting.gatk.refdata.RODRecordList;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.filters.ZeroMappingQualityReadFilter;
import org.broadinstitute.sting.gatk.datasources.simpleDataSources.ReferenceOrderedDataSource;
import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.*;
import org.broadinstitute.sting.utils.genotype.Variation;
import java.io.PrintStream;
import java.io.FileNotFoundException;
import java.util.*;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.SAMReadGroupRecord;
@By( DataSource.READS ) // Only look at covered loci, not every loci of the reference file
@WalkerName( "CountCovariatesRefactored" )
@ReadFilters( {ZeroMappingQualityReadFilter.class} ) // Filter out all reads with zero mapping quality
@Requires( {DataSource.READS, DataSource.REFERENCE, DataSource.REFERENCE_BASES} ) // This walker requires both -I input.bam and -R reference.fasta
public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> {
@Argument(fullName="list", shortName="ls", doc="List the available covariates and exit", required=false)
private Boolean LIST_ONLY = false;
@Argument(fullName="covariate", shortName="cov", doc="Covariates to be used in the recalibration. Each covariate is given as a separate cov parameter. ReadGroup and ReportedQuality are already added for you.", required=false)
private String[] COVARIATES = null;
@Argument(fullName = "use_original_quals", shortName="OQ", doc="If provided, we will use use the quals from the original qualities OQ attribute field instead of the quals in the regular QUALS field", required=false)
private boolean USE_ORIGINAL_QUALS = false;
@Argument(fullName = "window_size_nqs", shortName="nqs", doc="How big of a window should the MinimumNQSCovariate use for its calculation", required=false)
private int WINDOW_SIZE = 3;
@Argument(fullName="recal_file", shortName="recalFile", required=false, doc="Filename for the outputted covariates table recalibration file")
private String RECAL_FILE = "output.recal_data.csv";
@Argument(fullName="no_print_header", shortName="noHeader", required=false, doc="Don't print the usual header on the table recalibration file. For debugging purposes only.")
private boolean NO_PRINT_HEADER = false;
private RecalDataManager dataManager; // Holds the data HashMap, mostly used by TableRecalibrationWalker to create collapsed data hashmaps
private ArrayList<Covariate> requestedCovariates; // A list to hold the covariate objects that were requested
//private HashMap<SAMRecord, String> readGroupHashMap; // A hash map that hashes the read object itself into the read group name
// This is done for optimization purposes because pulling the read group out of the SAMRecord is expensive
private long countedSites = 0; // Number of loci used in the calculations, used for reporting in the output file
private long countedBases = 0; // Number of bases used in the calculations, used for reporting in the output file
private long skippedSites = 0; // Number of loci skipped because it was a dbSNP site, used for reporting in the output file
// initialize
/**
* Parse the -cov arguments and create a list of covariates to be used here
* Based on the covariates' estimates for initial capacity allocate the data hashmap
*/
public void initialize() {
// Get a list of all available covariates
final List<Class<? extends Covariate>> classes = PackageUtils.getClassesImplementingInterface(Covariate.class);
// Print and exit if that's what was requested
if ( LIST_ONLY ) {
out.println( "Available covariates:" );
for( Class<?> covClass : classes ) {
out.println( covClass.getSimpleName() );
}
out.println();
System.exit( 0 ); // early exit here because user requested it
}
// Warn the user if no dbSNP file was specified
boolean foundDBSNP = false;
for( ReferenceOrderedDataSource rod : this.getToolkit().getRodDataSources() ) {
if( rod.getName().equalsIgnoreCase( "dbsnp" ) ) {
foundDBSNP = true;
}
}
if( !foundDBSNP ) {
Utils.warnUser("This calculation is critically dependent on being able to skip over known variant sites. Are you sure you want to be running without a dbSNP rod specified?");
}
// Initialize the requested covariates by parsing the -cov argument
requestedCovariates = new ArrayList<Covariate>();
int estimatedCapacity = 1; // capacity is multiplicitive so this starts at one
if( COVARIATES != null ) {
if(COVARIATES[0].equalsIgnoreCase("ALL")) { // the user wants ALL covariates to be used
requestedCovariates.add( new ReadGroupCovariate() ); // first add the required covariates then add the rest by looping over all implementing classes that were found
requestedCovariates.add( new QualityScoreCovariate() );
for( Class<?> covClass : classes ) {
try {
Covariate covariate = (Covariate)covClass.newInstance();
estimatedCapacity *= covariate.estimatedNumberOfBins();
// Some covariates need parameters (user supplied command line arguments) passed to them
if( covariate instanceof MinimumNQSCovariate ) { covariate = new MinimumNQSCovariate( WINDOW_SIZE ); }
if( !( covariate instanceof ReadGroupCovariate || covariate instanceof QualityScoreCovariate ) ) { // these were already added so don't add them again
requestedCovariates.add( covariate );
}
} catch ( InstantiationException e ) {
throw new StingException( String.format("Can not instantiate covariate class '%s': must be concrete class.", covClass.getSimpleName()) );
} catch ( IllegalAccessException e ) {
throw new StingException( String.format("Can not instantiate covariate class '%s': must have no-arg constructor.", covClass.getSimpleName()) );
}
}
} else { // The user has specified a list of several covariates
int covNumber = 1;
for( String requestedCovariateString : COVARIATES ) {
boolean foundClass = false;
for( Class<?> covClass : classes ) {
if( requestedCovariateString.equalsIgnoreCase( covClass.getSimpleName() ) ) { // -cov argument matches the class name for an implementing class
foundClass = true;
// Read Group Covariate and Quality Score Covariate are required covariates for the recalibration calculation and must begin the list
if( (covNumber == 1 && !requestedCovariateString.equalsIgnoreCase( "ReadGroupCovariate" )) ||
(covNumber == 2 && !requestedCovariateString.equalsIgnoreCase( "QualityScoreCovariate" )) ) {
throw new StingException("ReadGroupCovariate and QualityScoreCovariate are required covariates for the recalibration calculation and must begin the list" );
}
covNumber++;
try {
// Now that we've found a matching class, try to instantiate it
Covariate covariate = (Covariate)covClass.newInstance();
estimatedCapacity *= covariate.estimatedNumberOfBins();
// Some covariates need parameters (user supplied command line arguments) passed to them
if( covariate instanceof MinimumNQSCovariate ) { covariate = new MinimumNQSCovariate( WINDOW_SIZE ); }
requestedCovariates.add( covariate );
} catch ( InstantiationException e ) {
throw new StingException( String.format("Can not instantiate covariate class '%s': must be concrete class.", covClass.getSimpleName()) );
} catch ( IllegalAccessException e ) {
throw new StingException( String.format("Can not instantiate covariate class '%s': must have no-arg constructor.", covClass.getSimpleName()) );
}
}
}
if( !foundClass ) {
throw new StingException( "The requested covariate type (" + requestedCovariateString + ") isn't a valid covariate option. Use --list to see possible covariates." );
}
}
}
} else { // No covariates were specified by the user so add the default, required ones
Utils.warnUser( "Using default set of covariates because none were specified. Using ReadGroupCovariate and QualityScoreCovariate only." );
requestedCovariates.add( new ReadGroupCovariate() );
requestedCovariates.add( new QualityScoreCovariate() );
estimatedCapacity = 300 * 40;
}
logger.info( "The covariates being used here: " );
logger.info( requestedCovariates );
if(estimatedCapacity > 300 * 40 * 200 * 16) { estimatedCapacity = 300 * 40 * 200 * 16; } // Don't want to crash with out of heap space exception
dataManager = new RecalDataManager( estimatedCapacity );
//readGroupHashMap = new HashMap<SAMRecord, String>( 50000000, 0.97f );
}
// map
/**
 * For each read at this locus get the various covariate values and increment that location in the map based on
 * whether or not the base matches the reference at this particular location.
 * Sites that any "dbsnp" ROD marks as a SNP are skipped entirely, since mismatches there are
 * expected variation rather than sequencing error.
 * @param tracker The reference metadata tracker
 * @param ref The reference context
 * @param context The alignment context
 * @return Returns 1, but this value isn't used in the reduce step
 */
public Integer map( RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context ) {
    // pull out anything passed by -B name,type,file that has the name "dbsnp"
    final RODRecordList<ReferenceOrderedDatum> dbsnpRODs = tracker.getTrackData( "dbsnp", null );
    boolean isSNP = false;
    if (dbsnpRODs != null) {
        for( ReferenceOrderedDatum rod : dbsnpRODs ) {
            if( ((Variation)rod).isSNP() ) {
                isSNP = true; // at least one of the rods says this is a snp site
                break;
            }
        }
    }
    // Only use data from non-dbsnp sites
    // Assume every mismatch at a non-dbsnp site is indicitive of poor quality
    if( !isSNP ) {
        final List<SAMRecord> reads = context.getReads();
        final List<Integer> offsets = context.getOffsets();
        // Locals are hoisted out of the loop and reused for each read (hot path).
        SAMRecord read;
        int offset;
        String readGroupId;
        byte[] quals;
        byte[] bases;
        byte refBase;
        byte prevBase;
        String platform;
        byte[] colorSpaceQuals;
        final int numReads = reads.size();
        // For each read at this locus
        for( int iii = 0; iii < numReads; iii++ ) {
            read = reads.get(iii);
            //readGroupId = readGroupHashMap.get( read );
            //if( readGroupId == null ) { // read is not in the hashmap so add it
            //    readGroupId = read.getReadGroup().getReadGroupId();
            //    readGroupHashMap.put( read, readGroupId );
            offset = offsets.get(iii); // offset is zero based so quals[offset] and bases[offset] is correct
            // skip first and last base because there is no dinuc, this is mainly done for speed so we don't have to check cases
            if( offset > 0 && offset < read.getReadLength() - 1 ) {
                quals = read.getBaseQualities();
                // Check if we need to use the original quality scores instead
                if ( USE_ORIGINAL_QUALS && read.getAttribute(RecalDataManager.ORIGINAL_QUAL_ATTRIBUTE_TAG) != null ) {
                    Object obj = read.getAttribute(RecalDataManager.ORIGINAL_QUAL_ATTRIBUTE_TAG);
                    if ( obj instanceof String )
                        quals = QualityUtils.fastqToPhred((String)obj);
                    else {
                        throw new RuntimeException(String.format("Value encoded by %s in %s isn't a string!", RecalDataManager.ORIGINAL_QUAL_ATTRIBUTE_TAG, read.getReadName()));
                    }
                }
                // skip if base quality is zero
                if( quals[offset] > 0 ) {
                    bases = read.getReadBases(); // BUGBUG: DinucCovariate is relying on this method returning the same byte for bases 'a' and 'A'
                    refBase = (byte)ref.getBase();
                    prevBase = bases[offset-1];
                    // Get the complement base strand if we are a negative strand read
                    if( read.getReadNegativeStrandFlag() ) {
                        bases = BaseUtils.simpleComplement( bases ); // this is an expensive call
                        refBase = (byte)BaseUtils.simpleComplement( ref.getBase() );
                        // on the negative strand the "previous" base in read orientation is the next base in reference coordinates
                        prevBase = bases[offset+1];
                    }
                    // skip if this base or the previous one was an 'N' or etc.
                    if( BaseUtils.isRegularBase( (char)prevBase ) && BaseUtils.isRegularBase( (char)bases[offset] ) ) {
                        final SAMReadGroupRecord readGroup = read.getReadGroup();
                        readGroupId = readGroup.getReadGroupId(); // this is an expensive call
                        platform = readGroup.getPlatform(); // this is an expensive call
                        // SOLID bams insert the reference base into the read if the color space quality is zero, so skip over them
                        colorSpaceQuals = null;
                        if( platform.equalsIgnoreCase("SOLID") ) {
                            colorSpaceQuals = QualityUtils.fastqToPhred((String)read.getAttribute(RecalDataManager.COLOR_SPACE_QUAL_ATTRIBUTE_TAG));
                        }
                        if( colorSpaceQuals == null || colorSpaceQuals[offset] > 0 ) //BUGBUG: This isn't exactly correct yet
                        {
                            updateDataFromRead( read, offset, readGroupId, platform, quals, bases, refBase );
                        }
                    }
                }
            }
        }
        countedSites++;
    } else { // We skipped over the dbSNP site
        skippedSites++;
    }

    return 1; // This value isn't actually used anywhere
}
/**
 * Major workhorse routine for this walker.
 * Builds a key from the current value of every requested covariate, then looks up (creating on
 * first use) the matching RecalDatum bucket and bumps it: one observation always, plus one
 * mismatch when the read base differs from the reference base.
 * Read-derived values are passed in as parameters so the caller can amortize the expensive
 * SAMRecord accessor calls across all covariates.
 * @param read The read
 * @param offset The offset in the read for this locus
 * @param readGroup The read group the read is in
 * @param platform The sequencing platform of the read group
 * @param quals List of base quality scores
 * @param bases The bases which make up the read
 * @param refBase The reference base at this locus
 */
private void updateDataFromRead(final SAMRecord read, final int offset, final String readGroup, final String platform,
                                final byte[] quals, final byte[] bases, final byte refBase) {
    // One value per requested covariate; together they form the lookup key.
    final List<Comparable> covariateValues = new ArrayList<Comparable>( requestedCovariates.size() );
    for( Covariate cov : requestedCovariates ) {
        covariateValues.add( cov.getValue( read, offset, readGroup, platform, quals, bases ) );
    }

    // Fetch the bucket for this combination of covariate values, creating it lazily.
    RecalDatum bucket = dataManager.data.get( covariateValues );
    if( bucket == null ) {
        bucket = new RecalDatum(); // initialized with zeros, incremented below
        dataManager.data.put( covariateValues, bucket );
    }

    // The (char) casts are essential: without them the bytes would widen and bind to the
    // (long, long) overload of increment, which does something entirely different.
    bucket.increment( (char)bases[offset], (char)refBase );
    countedBases++;
}
// reduce
/**
 * Initialize the reduce step by opening the recalibration output file named by the -rf argument.
 * @return A PrintStream writing to the -rf filename
 * @throws RuntimeException if the output file cannot be opened
 */
public PrintStream reduceInit() {
    final PrintStream recalTableStream;
    try {
        recalTableStream = new PrintStream( RECAL_FILE );
    } catch ( FileNotFoundException e ) {
        throw new RuntimeException( "Couldn't open output file: ", e );
    }
    return recalTableStream;
}
/**
 * The Reduce method doesn't do anything for this walker; all accumulation happens in map(),
 * so reduce just threads the output stream through to onTraversalDone().
 * @param value Result of the map. This value is immediately ignored.
 * @param recalTableStream The PrintStream used to output the CSV data
 * @return returns The PrintStream used to output the CSV data
 */
public PrintStream reduce( Integer value, PrintStream recalTableStream ) {
    return recalTableStream; // nothing to do here
}
/**
 * Write out the full data hashmap to disk in CSV format, with progress messages on the
 * walker's standard output stream, then close the table stream.
 * @param recalTableStream The PrintStream to write out to
 */
public void onTraversalDone( PrintStream recalTableStream ) {
    out.print( "Writing raw recalibration data..." );
    outputToCSV( recalTableStream );
    out.println( "...done!" );
    recalTableStream.close(); // flushes and releases the -rf output file
}
/**
 * For each entry (key-value pair) in the data hashmap output the Covariate's values as well as the RecalDatum's data in CSV format.
 * Unless NO_PRINT_HEADER is set, a header block is emitted first: "#"-prefixed summary counts
 * plus one "@!"-prefixed line per covariate class so TableRecalibrationWalker can reconstruct
 * the covariate list.
 * @param recalTableStream The PrintStream to write out to
 */
private void outputToCSV( final PrintStream recalTableStream ) {

    if( !NO_PRINT_HEADER ) {
        recalTableStream.printf("# Counted Sites    %d%n", countedSites);
        recalTableStream.printf("# Counted Bases    %d%n", countedBases);
        recalTableStream.printf("# Skipped Sites    %d%n", skippedSites);
        // NOTE(review): when skippedSites == 0 this division yields Infinity and the header
        // line reads "1 / Infinity bp" — confirm downstream parsers tolerate that.
        recalTableStream.printf("# Fraction Skipped 1 / %.0f bp%n", (double)countedSites / skippedSites);

        for( Covariate cov : requestedCovariates ) {
            // The "@!" is a code for TableRecalibrationWalker to recognize this line as a Covariate class name
            recalTableStream.println( "@!" + cov.getClass().getSimpleName() );
        }
    }

    // For each entry in the data hashmap
    for( Map.Entry<List<? extends Comparable>, RecalDatum> entry : dataManager.data.entrySet() ) {
        // For each Covariate in the key
        for( Comparable comp : entry.getKey() ) {
            // Output the Covariate's value; headerless mode drops String-typed values entirely
            if( NO_PRINT_HEADER && comp instanceof String ) { continue; } // BUGBUG
            recalTableStream.print( comp + "," );
        }
        // Output the RecalDatum entry
        recalTableStream.println( entry.getValue().outputToCSV() );
    }
}
}
|
package org.osmdroid.tileprovider;
import java.util.HashMap;
import microsoft.mappoint.TileSystem;
import org.osmdroid.tileprovider.constants.OpenStreetMapTileProviderConstants;
import org.osmdroid.tileprovider.tilesource.ITileSource;
import org.osmdroid.util.TileLooper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Handler;
/**
* This is an abstract class. The tile provider is responsible for:
* <ul>
* <li>determining if a map tile is available,</li>
* <li>notifying the client, via a callback handler</li>
* </ul>
* see {@link MapTile} for an overview of how tiles are served by this provider.
*
* @author Marc Kurtz
* @author Nicolas Gramlich
*
*/
public abstract class MapTileProviderBase implements IMapTileProviderCallback,
        OpenStreetMapTileProviderConstants {

    private static final Logger logger = LoggerFactory.getLogger(MapTileProviderBase.class);

    /** In-memory cache of tiles this provider has already obtained. */
    protected final MapTileCache mTileCache;

    /** Handler notified via empty messages when a tile request completes or fails; may be null. */
    protected Handler mTileRequestCompleteHandler;

    /** Whether the provider may use the network connection; see {@link #useDataConnection()}. */
    protected boolean mUseDataConnection = true;

    private ITileSource mTileSource;

    /**
     * Gets the map tile for the given tile coordinates, or null if it is not (yet) available.
     */
    public abstract Drawable getMapTile(MapTile pTile);

    /** Releases any resources held by this provider. */
    public abstract void detach();

    /**
     * Gets the minimum zoom level this tile provider can provide
     *
     * @return the minimum zoom level
     */
    public abstract int getMinimumZoomLevel();

    /**
     * Gets the maximum zoom level this tile provider can provide
     *
     * @return the maximum zoom level
     */
    public abstract int getMaximumZoomLevel();

    /**
     * Sets the tile source for this tile provider. The tile cache is cleared because tiles
     * rendered by the previous source are no longer valid.
     *
     * @param pTileSource
     *            the tile source
     */
    public void setTileSource(final ITileSource pTileSource) {
        mTileSource = pTileSource;
        clearTileCache();
    }

    /**
     * Gets the tile source for this tile provider.
     *
     * @return the tile source
     */
    public ITileSource getTileSource() {
        return mTileSource;
    }

    /**
     * Creates a provider with no tile-request-complete handler.
     */
    public MapTileProviderBase(final ITileSource pTileSource) {
        this(pTileSource, null);
    }

    /**
     * Creates a provider.
     *
     * @param pTileSource
     *            the tile source
     * @param pDownloadFinishedListener
     *            handler notified when tile requests complete or fail; may be null
     */
    public MapTileProviderBase(final ITileSource pTileSource,
            final Handler pDownloadFinishedListener) {
        mTileCache = new MapTileCache();
        mTileRequestCompleteHandler = pDownloadFinishedListener;
        mTileSource = pTileSource;
    }

    /**
     * Called by implementation class methods indicating that they have completed the request as
     * best it can. The tile is added to the cache, and a MAPTILE_SUCCESS_ID message is sent.
     *
     * @param pState
     *            the map tile request state object
     * @param pDrawable
     *            the Drawable of the map tile
     */
    @Override
    public void mapTileRequestCompleted(final MapTileRequestState pState, final Drawable pDrawable) {
        final MapTile tile = pState.getMapTile();
        if (pDrawable != null) {
            mTileCache.putTile(tile, pDrawable);
        }

        // tell our caller we've finished and it should update its view
        if (mTileRequestCompleteHandler != null) {
            mTileRequestCompleteHandler.sendEmptyMessage(MapTile.MAPTILE_SUCCESS_ID);
        }

        if (DEBUGMODE) {
            logger.debug("MapTile request complete: " + tile);
        }
    }

    /**
     * Called by implementation class methods indicating that they have failed to retrieve the
     * requested map tile. a MAPTILE_FAIL_ID message is sent.
     *
     * @param pState
     *            the map tile request state object
     */
    @Override
    public void mapTileRequestFailed(final MapTileRequestState pState) {
        final MapTile tile = pState.getMapTile();
        if (mTileRequestCompleteHandler != null) {
            mTileRequestCompleteHandler.sendEmptyMessage(MapTile.MAPTILE_FAIL_ID);
        }

        if (DEBUGMODE) {
            logger.debug("MapTile request failed: " + tile);
        }
    }

    public void setTileRequestCompleteHandler(final Handler handler) {
        mTileRequestCompleteHandler = handler;
    }

    public void ensureCapacity(final int pCapacity) {
        mTileCache.ensureCapacity(pCapacity);
    }

    public void clearTileCache() {
        mTileCache.clear();
    }

    /**
     * Whether to use the network connection if it's available.
     */
    @Override
    public boolean useDataConnection() {
        return mUseDataConnection;
    }

    /**
     * Set whether to use the network connection if it's available.
     *
     * @param pMode
     *            if true use the network connection if it's available. if false don't use the
     *            network connection even if it's available.
     */
    public void setUseDataConnection(final boolean pMode) {
        mUseDataConnection = pMode;
    }

    /**
     * Recreate the cache using scaled versions of the tiles currently in it
     * @param pNewZoomLevel the zoom level that we need now
     * @param pOldZoomLevel the previous zoom level that we should get the tiles to rescale
     * @param pViewPort the view port we need tiles for
     */
    public void rescaleCache(final int pNewZoomLevel, final int pOldZoomLevel, final Rect pViewPort) {

        if (pNewZoomLevel == pOldZoomLevel) {
            return;
        }

        final long startMs = System.currentTimeMillis();

        logger.info("rescale tile cache from "+ pOldZoomLevel + " to " + pNewZoomLevel);

        final int tileSize = getTileSource().getTileSizePixels();
        // shift the viewport into the coordinate system whose origin is the top-left of the world
        final int worldSize_2 = TileSystem.MapSize(pNewZoomLevel) >> 1;
        final Rect viewPort = new Rect(pViewPort);
        viewPort.offset(worldSize_2, worldSize_2);

        // zooming in means each old tile covers several new tiles; zooming out is the reverse
        final ScaleTileLooper tileLooper = pNewZoomLevel > pOldZoomLevel
                ? new ZoomInTileLooper(pOldZoomLevel)
                : new ZoomOutTileLooper(pOldZoomLevel);
        tileLooper.loop(null, pNewZoomLevel, tileSize, viewPort);

        final long endMs = System.currentTimeMillis();
        logger.info("Finished rescale in " + (endMs - startMs) + "ms");
    }

    private abstract class ScaleTileLooper extends TileLooper {

        /** new (scaled) tiles to add to cache
          * NB first generate all and then put all in cache,
          * otherwise the ones we need will be pushed out */
        protected final HashMap<MapTile, Bitmap> mNewTiles;

        protected final int mOldZoomLevel;
        protected int mDiff;
        protected int mTileSize_2;
        protected Rect mSrcRect;
        protected Rect mDestRect;
        protected Paint mDebugPaint;

        public ScaleTileLooper(final int pOldZoomLevel) {
            mOldZoomLevel = pOldZoomLevel;
            mNewTiles = new HashMap<MapTile, Bitmap>();
            mSrcRect = new Rect();
            mDestRect = new Rect();
            mDebugPaint = new Paint();
        }

        @Override
        public void initialiseLoop(final int pZoomLevel, final int pTileSizePx) {
            mDiff = Math.abs(pZoomLevel - mOldZoomLevel);
            mTileSize_2 = pTileSizePx >> mDiff;
        }

        @Override
        public void handleTile(final Canvas pCanvas, final int pTileSizePx, final MapTile pTile, final int pX, final int pY) {

            // Get tile from cache.
            // If it's found then no need to created scaled version.
            // If not found (null) them we've initiated a new request for it,
            // and now we'll create a scaled version until the request completes.
            final Drawable requestedTile = getMapTile(pTile);
            if (requestedTile == null) {
                try {
                    handleTile(pTileSizePx, pTile, pX, pY);
                } catch(final OutOfMemoryError e) {
                    logger.error("OutOfMemoryError rescaling cache");
                }
            }
        }

        @Override
        public void finaliseLoop() {
            // now add the new ones, pushing out the old ones
            while (!mNewTiles.isEmpty()) {
                final MapTile tile = mNewTiles.keySet().iterator().next();
                final Bitmap bitmap = mNewTiles.remove(tile);
                final ExpirableBitmapDrawable drawable = new ExpirableBitmapDrawable(bitmap);
                // mark the scaled tile as expired so the real tile replaces it when it arrives
                drawable.setState(new int[] { ExpirableBitmapDrawable.EXPIRED });
                mTileCache.putTile(tile, drawable);
            }
        }

        protected abstract void handleTile(int pTileSizePx, MapTile pTile, int pX, int pY);
    }

    private class ZoomInTileLooper extends ScaleTileLooper {
        public ZoomInTileLooper(final int pOldZoomLevel) {
            super(pOldZoomLevel);
        }
        @Override
        protected void handleTile(final int pTileSizePx, final MapTile pTile, final int pX, final int pY) {
            // get the correct fraction of the tile from cache and scale up
            final MapTile oldTile = new MapTile(mOldZoomLevel, pTile.getX() >> mDiff, pTile.getY() >> mDiff);
            final Drawable oldDrawable = mTileCache.getMapTile(oldTile);

            if (oldDrawable instanceof BitmapDrawable) {
                final Bitmap oldBitmap = ((BitmapDrawable)oldDrawable).getBitmap();
                // FIX: guard against a null backing bitmap (previously NPE'd in drawBitmap);
                // mirrors the check already done in ZoomOutTileLooper
                if (oldBitmap == null) {
                    return;
                }
                final int xx = (pX % (1 << mDiff)) * mTileSize_2;
                final int yy = (pY % (1 << mDiff)) * mTileSize_2;
                mSrcRect.set(xx, yy, xx + mTileSize_2, yy + mTileSize_2);
                mDestRect.set(0, 0, pTileSizePx, pTileSizePx);
                final Bitmap bitmap = Bitmap.createBitmap(pTileSizePx, pTileSizePx, Bitmap.Config.ARGB_8888);
                final Canvas canvas = new Canvas(bitmap);
                canvas.drawBitmap(oldBitmap, mSrcRect, mDestRect, null);
                if (DEBUGMODE) {
                    logger.debug("Created scaled tile: " + pTile);
                    mDebugPaint.setTextSize(40);
                    canvas.drawText("scaled", 50, 50, mDebugPaint);
                }
                mNewTiles.put(pTile, bitmap);
            }
        }
    }

    private class ZoomOutTileLooper extends ScaleTileLooper {
        public ZoomOutTileLooper(final int pOldZoomLevel) {
            super(pOldZoomLevel);
        }
        @Override
        protected void handleTile(final int pTileSizePx, final MapTile pTile, final int pX, final int pY) {

            // get many tiles from cache and make one tile from them
            final int xx = pTile.getX() << mDiff;
            final int yy = pTile.getY() << mDiff;
            final int numTiles = 1 << mDiff;
            Bitmap bitmap = null;
            Canvas canvas = null;
            for(int x = 0; x < numTiles; x++) {
                for(int y = 0; y < numTiles; y++) {
                    final MapTile oldTile = new MapTile(mOldZoomLevel, xx + x, yy + y);
                    final Drawable oldDrawable = mTileCache.getMapTile(oldTile);
                    if (oldDrawable instanceof BitmapDrawable) {
                        final Bitmap oldBitmap = ((BitmapDrawable)oldDrawable).getBitmap();
                        if (oldBitmap != null) {
                            // lazily create the destination bitmap on the first contributing tile
                            if (bitmap == null) {
                                bitmap = Bitmap.createBitmap(pTileSizePx, pTileSizePx, Bitmap.Config.ARGB_8888);
                                canvas = new Canvas(bitmap);
                                canvas.drawColor(Color.LTGRAY);
                            }
                            mDestRect.set(
                                    x * mTileSize_2, y * mTileSize_2,
                                    (x + 1) * mTileSize_2, (y + 1) * mTileSize_2);
                            // FIX: removed a redundant second oldBitmap null check that duplicated the guard above
                            canvas.drawBitmap(oldBitmap, null, mDestRect, null);
                            // NOTE(review): removing by Bitmap value from mCachedTiles assumes that
                            // collection tracks bitmaps (not MapTile keys) — confirm against MapTileCache
                            mTileCache.mCachedTiles.remove(oldBitmap);
                        }
                    }
                }
            }

            if (bitmap != null) {
                mNewTiles.put(pTile, bitmap);
                if (DEBUGMODE) {
                    logger.debug("Created scaled tile: " + pTile);
                    mDebugPaint.setTextSize(40);
                    canvas.drawText("scaled", 50, 50, mDebugPaint);
                }
            }
        }
    }
}
|
package org.codehaus.jparsec.examples.java.parser;
import static org.codehaus.jparsec.examples.java.parser.ExpressionParser.IDENTIFIER;
import static org.codehaus.jparsec.examples.java.parser.StatementParser.SYSTEM_MODIFIER;
import static org.codehaus.jparsec.examples.java.parser.TerminalParserTest.assertFailure;
import static org.codehaus.jparsec.examples.java.parser.TerminalParserTest.assertResult;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import junit.framework.TestCase;
import org.codehaus.jparsec.Parser;
import org.codehaus.jparsec.examples.java.ast.declaration.AnnotationDef;
import org.codehaus.jparsec.examples.java.ast.declaration.ClassDef;
import org.codehaus.jparsec.examples.java.ast.declaration.ClassInitializerDef;
import org.codehaus.jparsec.examples.java.ast.declaration.ConstructorDef;
import org.codehaus.jparsec.examples.java.ast.declaration.Declaration;
import org.codehaus.jparsec.examples.java.ast.declaration.DefBody;
import org.codehaus.jparsec.examples.java.ast.declaration.EnumDef;
import org.codehaus.jparsec.examples.java.ast.declaration.FieldDef;
import org.codehaus.jparsec.examples.java.ast.declaration.Import;
import org.codehaus.jparsec.examples.java.ast.declaration.InterfaceDef;
import org.codehaus.jparsec.examples.java.ast.declaration.Member;
import org.codehaus.jparsec.examples.java.ast.declaration.MethodDef;
import org.codehaus.jparsec.examples.java.ast.declaration.Program;
import org.codehaus.jparsec.examples.java.ast.declaration.QualifiedName;
import org.codehaus.jparsec.examples.java.ast.declaration.TypeParameterDef;
/**
* Unit test for {@link DeclarationParser}.
*
* @author Ben Yu
*/
public class DeclarationParserTest extends TestCase {

    /** A field-definition parser reused across tests that need a class-body member. */
    private static final Parser<Member> FIELD = DeclarationParser.fieldDef(IDENTIFIER);

    public void testRemoveNulls() {
        // removeNulls() strips null elements in place, keeping the order of the rest.
        List<?> input = new ArrayList<String>(Arrays.asList("a", "b", null, "1", "2", null));
        DeclarationParser.removeNulls(input);
        assertEquals(Arrays.asList("a", "b", "1", "2"), input);
    }

    public void testFieldDef() {
        // plain, modified, initialized, and array-initialized fields
        assertResult(FIELD, "int f;", FieldDef.class, "int f;");
        assertResult(FIELD, "static final int f;", FieldDef.class, "static final int f;");
        assertResult(FIELD, "int f = foo;", FieldDef.class, "int f = foo;");
        assertResult(FIELD, "int[] a = {foo};", FieldDef.class, "int[] a = {foo};");
    }

    public void testBody() {
        // stray semicolons inside a body are tolerated and normalized away
        Parser<DefBody> p = DeclarationParser.body(FIELD);
        assertResult(p, "{}", DefBody.class, "{}");
        assertResult(p, "{int f;}", DefBody.class, "{int f;}");
        assertResult(p, "{int f=foo; int g;}", DefBody.class, "{int f = foo; int g;}");
        assertResult(p, "{;int f=foo;;; int g;;}", DefBody.class, "{int f = foo; int g;}");
    }

    public void testTypeParameter() {
        // bounds are allowed, but a bare wildcard bound is rejected
        Parser<TypeParameterDef> p = DeclarationParser.TYPE_PARAMETER;
        assertResult(p, "T", TypeParameterDef.class, "T");
        assertResult(p, "T extends F", TypeParameterDef.class, "T extends F");
        assertResult(p, "T extends Enum<T>", TypeParameterDef.class, "T extends Enum<T>");
        assertResult(p, "T extends Enum<?>", TypeParameterDef.class, "T extends Enum<?>");
        assertFailure(p, "T extends ?", 1, 11, "? encountered.");
    }

    public void testMethodDef() {
        Parser<Member> p = DeclarationParser.methodDef(
            SYSTEM_MODIFIER, IDENTIFIER, StatementParser.BREAK);
        // abstract-style declarations and annotation-style defaults
        assertResult(p, "public static void f();", MethodDef.class, "public static void f();");
        assertResult(p, "String f() default foo;", MethodDef.class, "String f() default foo;");
        // throws clauses
        assertResult(p, "void f() throws E;",
            MethodDef.class, "void f() throws E;");
        assertResult(p, "void f() throws E, F<T>;",
            MethodDef.class, "void f() throws E, F<T>;");
        assertFailure(p, "void f() throws", 1, 16);
        assertFailure(p, "void f() throws E[];", 1, 18);
        // concrete bodies, parameters, and type parameters
        assertResult(p, "void f() {}", MethodDef.class, "void f() {}");
        assertResult(p, "void f() {break; break;}",
            MethodDef.class, "void f() {break; break;}");
        assertResult(p, "void f(int i) {}",
            MethodDef.class, "void f(int i) {}");
        assertResult(p, "void f(final int i, List<Foo> l) {}",
            MethodDef.class, "void f(final int i, List<Foo> l) {}");
        assertResult(p, "<K, V extends K> void f(int i) {}",
            MethodDef.class, "<K, V extends K> void f(int i) {}");
    }

    public void testConstructorDef() {
        Parser<Member> p = DeclarationParser.constructorDef(
            SYSTEM_MODIFIER, StatementParser.BREAK);
        assertResult(p, "public Foo(){}", ConstructorDef.class, "public Foo() {}");
        assertResult(p, "Foo() throws E{break;}", ConstructorDef.class, "Foo() throws E {break;}");
        assertResult(p, "Foo(int i) {}", ConstructorDef.class, "Foo(int i) {}");
        assertResult(p, "Foo(final int i, List<Foo> l) {}",
            ConstructorDef.class, "Foo(final int i, List<Foo> l) {}");
    }

    public void testInitializerDef() {
        // both static and instance initializer blocks
        Parser<Member> p = DeclarationParser.initializerDef(StatementParser.BREAK);
        assertResult(p, "static {}", ClassInitializerDef.class, "static {}");
        assertResult(p, "static {break;}", ClassInitializerDef.class, "static {break;}");
        assertResult(p, " {}", ClassInitializerDef.class, "{}");
        assertResult(p, " {break;}", ClassInitializerDef.class, "{break;}");
    }

    public void testClassDef() {
        Parser<Declaration> p = DeclarationParser.classDef(SYSTEM_MODIFIER, FIELD);
        assertResult(p, "public final class Foo {}", ClassDef.class, "public final class Foo {}");
        assertResult(p, "final class Foo<T> {}", ClassDef.class, "final class Foo<T> {}");
        assertResult(p, "final class Foo<T extends Foo<T>, K> {}",
            ClassDef.class, "final class Foo<T extends Foo<T>, K> {}");
        assertResult(p, "final class Foo<T extends Foo<T>> extends ArrayList<?> {}",
            ClassDef.class, "final class Foo<T extends Foo<T>> extends ArrayList<?> {}");
        assertResult(p, "final class Foo<T extends Foo<T>> implements List<?> {}",
            ClassDef.class, "final class Foo<T extends Foo<T>> implements List<?> {}");
        assertResult(p, "final class Foo<T extends Foo<T>> implements List<?>, Iterable<T> {}",
            ClassDef.class, "final class Foo<T extends Foo<T>> implements List<?>, Iterable<T> {}");
        assertResult(p, "final class Foo<T extends Foo<T>> {public static final String S = foo;}",
            ClassDef.class, "final class Foo<T extends Foo<T>> {public static final String S = foo;}");
        assertResult(p, "final class Foo<T extends Foo<T>> {int i; int j;}",
            ClassDef.class, "final class Foo<T extends Foo<T>> {int i; int j;}");
    }

    public void testInterfaceDef() {
        // interfaces may only extend, never implement
        Parser<Declaration> p = DeclarationParser.interfaceDef(SYSTEM_MODIFIER, FIELD);
        assertResult(p, "public native interface Foo {}",
            InterfaceDef.class, "public native interface Foo {}");
        assertResult(p, "interface Foo<T> {}", InterfaceDef.class, "interface Foo<T> {}");
        assertResult(p, "interface Foo<T extends Foo<T>, K> {}",
            InterfaceDef.class, "interface Foo<T extends Foo<T>, K> {}");
        assertResult(p, "interface Foo<T extends Foo<T>> extends List<?> {}",
            InterfaceDef.class, "interface Foo<T extends Foo<T>> extends List<?> {}");
        assertFailure(p, "interface Foo implements List {}", 1, 15, "implements encountered.");
        assertResult(p, "interface Foo<T extends Foo<T>> extends List<?>, Iterable<T> {}",
            InterfaceDef.class, "interface Foo<T extends Foo<T>> extends List<?>, Iterable<T> {}");
        assertResult(p, "interface Foo<T extends Foo<T>> {public static String S = foo;}",
            InterfaceDef.class, "interface Foo<T extends Foo<T>> {public static String S = foo;}");
        assertResult(p, "interface Foo<T extends Foo<T>> {int i; int j;}",
            InterfaceDef.class, "interface Foo<T extends Foo<T>> {int i; int j;}");
    }

    public void testAnnotationDef() {
        Parser<Declaration> p =
            DeclarationParser.annotationDef(StatementParser.modifier(IDENTIFIER), FIELD);
        assertResult(p, "@interface Foo{}", AnnotationDef.class, "@interface Foo {}");
        assertResult(p, "@Target({METHOD, FIELD}) @RetentionPolicy(RUNTIME) @interface Foo{}",
            AnnotationDef.class, "@Target({METHOD, FIELD}) @RetentionPolicy(RUNTIME) @interface Foo {}");
        assertResult(p, "@interface Foo{int i;int j;}",
            AnnotationDef.class, "@interface Foo {int i; int j;}");
    }

    public void testEnumDef() {
        Parser<Declaration> p = DeclarationParser.enumDef(IDENTIFIER, FIELD);
        assertResult(p, "enum Foo {}", EnumDef.class, "enum Foo {}");
        assertResult(p, "enum Foo {FOO{int x;}}", EnumDef.class, "enum Foo {FOO {int x;}}");
        assertResult(p, "@For(Test) enum Foo {}", EnumDef.class, "@For(Test) enum Foo {}");
        assertResult(p, "enum Foo implements Comparable<Foo>, Serializable {}",
            EnumDef.class, "enum Foo implements Comparable<Foo>, Serializable {}");
        assertResult(p, "enum Foo {ONE, TWO(two); int i; int j;}",
            EnumDef.class, "enum Foo {ONE, TWO(two); int i; int j;}");
        assertResult(p, "enum Foo {ONE, TWO}", EnumDef.class, "enum Foo {ONE, TWO}");
    }

    public void testQualifiedName() {
        Parser<QualifiedName> p = DeclarationParser.QUALIFIED_NAME;
        assertResult(p, "foo.bar", QualifiedName.class, "foo.bar");
        assertResult(p, "foo", QualifiedName.class, "foo");
    }

    public void testPackage() {
        Parser<QualifiedName> p = DeclarationParser.PACKAGE;
        assertResult(p, "package foo.bar;", QualifiedName.class, "foo.bar");
        assertResult(p, "package foo;", QualifiedName.class, "foo");
    }

    public void testImport() {
        Parser<Import> p = DeclarationParser.IMPORT;
        assertResult(p, "import foo;", Import.class, "import foo;");
        assertResult(p, "import foo.bar;", Import.class, "import foo.bar;");
        assertResult(p, "import foo.bar.*;", Import.class, "import foo.bar.*;");
        assertResult(p, "import static foo;", Import.class, "import static foo;");
        assertResult(p, "import static foo.*;", Import.class, "import static foo.*;");
    }

    public void testProgram() {
        // whole-file parsing: package, imports, and every top-level declaration form
        Parser<Program> p = DeclarationParser.program();
        assertResult(p, "package foo; import foo.bar.*; class Foo {int[] a = {1}; Foo(){}}",
            Program.class, "package foo; import foo.bar.*; class Foo {int[] a = {1}; Foo() {}}");
        assertResult(p, "class Foo {{} static {}}",
            Program.class, "class Foo {{} static {}}");
        assertResult(p, "package foo; import foo.bar.*; enum Foo {}",
            Program.class, "package foo; import foo.bar.*; enum Foo {}");
        assertResult(p, "enum Foo {;static {1;} static {2;} {3;} {4;}}",
            Program.class, "enum Foo {; static {1;} static {2;} {3;} {4;}}");
        assertResult(p, "package foo; import foo.bar.*; interface Foo {int i = 1;}",
            Program.class, "package foo; import foo.bar.*; interface Foo {int i = 1;}");
        assertResult(p, "package foo; import foo.bar.*; @interface Foo {int[] value() default {1};}",
            Program.class, "package foo; import foo.bar.*; @interface Foo {int[] value() default {1};}");
        assertResult(p, "import foo.bar.*; class Foo<T> implements Bar {} interface Bar {}",
            Program.class, "import foo.bar.*; class Foo<T> implements Bar {} interface Bar {}");
        assertResult(p, "class Foo {class Bar {}}",
            Program.class, "class Foo {class Bar {}}");
        assertResult(p, "class Foo {private static final class Bar {}}",
            Program.class, "class Foo {private static final class Bar {}}");
        assertResult(p, "class Foo {enum Bar {B}}",
            Program.class, "class Foo {enum Bar {B}}");
        assertResult(p, "class Foo {@interface Bar {;;}}",
            Program.class, "class Foo {@interface Bar {}}");
    }
}
|
import java.net.*;
import java.io.*;
import java.util.*;
/**
 * Interactive command-line tool that builds and sends DNS dynamic update
 * messages (RFC 2136) using the legacy dns* API.  Commands are read from
 * standard input, one per line, until "quit" is entered.
 */
public class update {

    /*
     * An update message reuses the four standard DNS message sections under
     * different names (RFC 2136, section 2).
     */
    static final int ZONE = dns.QUERY;
    static final int PREREQ = dns.ANSWER;
    static final int UPDATE = dns.AUTHORITY;
    static final int ADDITIONAL = dns.ADDITIONAL;

    dnsMessage query;             // the update message currently being built
    dnsResolver res;              // created lazily once server/port/key are set
    String server;                // name of the server receiving the updates
    dnsName origin;               // default origin for relative record names
    int defaultTTL;               // TTL applied by "add"/"glue" when unspecified
    short defaultClass = dns.IN;
    short lastRcode;              // rcode of the last response, tested by "assert"

    /**
     * Runs the interactive command loop, reading commands from stdin and
     * applying them to the current update message.
     *
     * @param _server server that should receive the updates (e.g. "localhost")
     * @throws IOException if reading a command or sending a message fails
     */
    public
    update(String _server) throws IOException {
        // FIX: the constructor argument was previously ignored, leaving
        // 'server' null unless the user typed an explicit "server" command,
        // silently discarding the default computed in main().
        server = _server;
        query = new dnsMessage();
        query.getHeader().setOpcode(dns.UPDATE);
        InputStreamReader isr = new InputStreamReader(System.in);
        BufferedReader br = new BufferedReader(isr);
        while (true) {
            System.out.print("> ");
            String line = dnsIO.readExtendedLine(br);
            MyStringTokenizer st = new MyStringTokenizer(line);
            if (!st.hasMoreTokens())
                continue;
            String operation = st.nextToken();
            if (operation.equals("server")) {
                server = st.nextToken();
                res = new dnsResolver(server);
            }
            else if (operation.equals("key")) {
                String keyname = st.nextToken();
                String keydata = st.nextToken();
                if (res == null)
                    res = new dnsResolver(server);
                res.setTSIGKey(keyname, keydata);
            }
            else if (operation.equals("port")) {
                if (res == null)
                    res = new dnsResolver(server);
                // FIX: Short.parseShort rejected valid ports above 32767;
                // parse as int and narrow, preserving the full 16-bit range.
                res.setPort((short) Integer.parseInt(st.nextToken()));
            }
            else if (operation.equals("tcp")) {
                if (res == null)
                    res = new dnsResolver(server);
                res.setTCP(true);
            }
            else if (operation.equals("class")) {
                String s = st.nextToken();
                short newClass = dns.classValue(s);
                if (newClass > 0)
                    defaultClass = newClass;
                else
                    System.out.println("Invalid class " + newClass);
            }
            else if (operation.equals("ttl"))
                defaultTTL = Integer.parseInt(st.nextToken());
            else if (operation.equals("origin"))
                origin = new dnsName(st.nextToken());
            else if (operation.equals("require"))
                doRequire(st);
            else if (operation.equals("prohibit"))
                doProhibit(st);
            else if (operation.equals("add"))
                doAdd(st);
            else if (operation.equals("delete"))
                doDelete(st);
            else if (operation.equals("glue"))
                doGlue(st);
            else if (operation.equals("help")) {
                if (st.hasMoreTokens())
                    help(st.nextToken());
                else
                    help(null);
            }
            else if (operation.equals("send")) {
                if (res == null)
                    res = new dnsResolver(server);
                sendUpdate();
                // start a fresh update message after each send
                query = new dnsMessage();
                query.getHeader().setOpcode(dns.UPDATE);
            }
            else if (operation.equals("quit"))
                System.exit(0);
            else if (operation.equals("assert")) {
                // assert <rcode> [message]: exit with failure if the last
                // response's rcode does not match the expected one.
                String s = st.nextToken();
                String rcodeString = dns.rcodeString(lastRcode);
                if (!s.equalsIgnoreCase(rcodeString)) {
                    System.out.println("Expected rcode " + s +
                                       ", received " + rcodeString);
                    if (st.hasMoreTokens()) {
                        s = st.nextToken();
                        System.out.println(s);
                    }
                    System.exit(-1);
                }
            }
            else
                System.out.println("invalid keyword: " + operation);
        }
    }

    /**
     * Fills in the zone section if missing (from the origin, or derived from
     * the first update record by stripping its first label), sends the
     * message, and prints a dig-style summary of the response header.
     */
    void
    sendUpdate() throws IOException {
        if (query.getHeader().getCount(ZONE) == 0) {
            dnsName zone = origin;
            short dclass = defaultClass;
            if (zone == null) {
                Vector updates = query.getSection(UPDATE);
                if (updates == null) {
                    System.out.println("Invalid update");
                    return;
                }
                dnsRecord r = (dnsRecord) updates.elementAt(0);
                zone = new dnsName(r.getName(), 1);
                dclass = r.dclass;
            }
            dnsRecord soa = dnsRecord.newRecord(zone, dns.SOA, dclass);
            query.addRecord(ZONE, soa);
        }
        dnsMessage response = res.send(query);
        if (response == null)
            return;
        lastRcode = response.getHeader().getRcode();
        System.out.print(";; ->>HEADER<<- ");
        System.out.print("opcode: ");
        System.out.print(dns.opcodeString(response.getHeader().getOpcode()));
        System.out.print(", status: ");
        System.out.print(dns.rcodeString(response.getHeader().getRcode()));
        System.out.println(", id: " + response.getHeader().getID());
        System.out.print(";; flags: " + response.getHeader().printFlags());
        System.out.print("; ");
        for (int i = 0; i < 4; i++) {
            System.out.print(dns.sectionString(i));
            System.out.print(": ");
            System.out.print(response.getHeader().getCount(i));
            System.out.print(" ");
        }
        System.out.println();
        System.out.println(";; done");
    }

    /*
     * <name> [ttl] [class] <type> <data>
     * Ignore the class, if present.
     */
    dnsRecord
    parseRR(MyStringTokenizer st, short classValue, int TTLValue)
    throws IOException
    {
        dnsName name = new dnsName(st.nextToken(), origin);
        int ttl;
        short type;
        String s = st.nextToken();
        try {
            // the TTL field is optional; if the token is not numeric, fall
            // back to the caller-supplied default
            ttl = Integer.parseInt(s);
            s = st.nextToken();
        }
        catch (NumberFormatException e) {
            ttl = TTLValue;
        }
        if (dns.classValue(s) >= 0)
            s = st.nextToken();
        if ((type = dns.typeValue(s)) < 0)
            throw new IOException("Parse error");
        return dnsRecord.fromString(name, type, classValue, ttl, st, origin);
    }

    /*
     * <name> <type>
     */
    dnsRecord
    parseRRExistence(MyStringTokenizer st, short classValue) throws IOException {
        dnsName name = new dnsName(st.nextToken(), origin);
        short type;
        if ((type = dns.typeValue(st.nextToken())) < 0)
            throw new IOException("Parse error");
        return dnsRecord.newRecord(name, type, classValue, 0);
    }

    /*
     * <name>
     */
    dnsRecord
    parseName(MyStringTokenizer st, short classValue) throws IOException {
        dnsName name = new dnsName(st.nextToken(), origin);
        return dnsRecord.newRecord(name, dns.ANY, classValue, 0);
    }

    /**
     * Adds a prerequisite: -r record exists, -s record set exists,
     * -n name is in use.
     */
    void
    doRequire(MyStringTokenizer st) throws IOException {
        dnsRecord rec;
        String qualifier = st.nextToken();
        if (qualifier.equals("-r"))
            rec = parseRR(st, defaultClass, 0);
        else if (qualifier.equals("-s"))
            rec = parseRRExistence(st, dns.ANY);
        else if (qualifier.equals("-n"))
            rec = parseName(st, dns.ANY);
        else {
            System.out.println("qualifier " + qualifier + " not supported");
            return;
        }
        if (rec != null) {
            query.addRecord(PREREQ, rec);
            System.out.println(rec);
        }
    }

    /**
     * Adds a negative prerequisite: -r record absent, -s record set absent,
     * -n name not in use.
     */
    void
    doProhibit(MyStringTokenizer st) throws IOException {
        dnsRecord rec;
        String qualifier = st.nextToken();
        if (qualifier.equals("-r"))
            rec = parseRR(st, defaultClass, 0);
        else if (qualifier.equals("-s"))
            rec = parseRRExistence(st, dns.NONE);
        else if (qualifier.equals("-n"))
            rec = parseName(st, dns.NONE);
        else {
            System.out.println("qualifier " + qualifier + " not supported");
            return;
        }
        if (rec != null) {
            query.addRecord(PREREQ, rec);
            System.out.println(rec);
        }
    }

    /** Adds a record to the update section; -r is implied if omitted. */
    void
    doAdd(MyStringTokenizer st) throws IOException {
        dnsRecord rec;
        String qualifier = st.nextToken();
        if (!qualifier.startsWith("-")) {
            st.putBackToken(qualifier);
            qualifier = "-r";
        }
        if (qualifier.equals("-r"))
            rec = parseRR(st, defaultClass, defaultTTL);
        else {
            System.out.println("qualifier " + qualifier + " not supported");
            return;
        }
        if (rec != null) {
            query.addRecord(UPDATE, rec);
            System.out.println(rec);
        }
    }

    /**
     * Adds a deletion to the update section: -r a specific record,
     * -s a record set, -n all records at a name.
     */
    void
    doDelete(MyStringTokenizer st) throws IOException {
        dnsRecord rec;
        String qualifier = st.nextToken();
        if (qualifier.equals("-r"))
            rec = parseRR(st, dns.NONE, 0);
        else if (qualifier.equals("-s"))
            rec = parseRRExistence(st, dns.ANY);
        else if (qualifier.equals("-n"))
            rec = parseName(st, dns.ANY);
        else {
            System.out.println("qualifier " + qualifier + " not supported");
            return;
        }
        if (rec != null) {
            query.addRecord(UPDATE, rec);
            System.out.println(rec);
        }
    }

    /** Adds a glue record to the additional section; -r implied if omitted. */
    void
    doGlue(MyStringTokenizer st) throws IOException {
        dnsRecord rec;
        String qualifier = st.nextToken();
        if (!qualifier.startsWith("-")) {
            st.putBackToken(qualifier);
            qualifier = "-r";
        }
        if (qualifier.equals("-r"))
            rec = parseRR(st, defaultClass, defaultTTL);
        else {
            System.out.println("qualifier " + qualifier + " not supported");
            return;
        }
        if (rec != null) {
            query.addRecord(ADDITIONAL, rec);
            System.out.println(rec);
        }
    }

    static void
    helpResolver() {
        System.out.println("Resolver options:\n" +
            " server <name>" +
            "\tserver that receives the updates (default: localhost)\n" +
            " key <name> <data>" +
            "\tTSIG key used to sign the messages\n" +
            " port <port>" +
            "\t\tUDP/TCP port the message is sent to (default: 53)\n" +
            " tcp" +
            "\t\t\tTCP should be used to send messages (default: unset)\n"
        );
    }

    static void
    helpAttributes() {
        System.out.println("Attributes:\n" +
            " class <class>\t" +
            "class of the zone to be updated (default: IN)\n" +
            " ttl <ttl>\t\t" +
            "ttl of an added record, if unspecified (default: 0)\n" +
            " origin <origin>\t" +
            "default origin of each record name (default: .)\n"
        );
    }

    static void
    helpData() {
        System.out.println("Data:\n" +
            " require/prohibit\t" +
            "require that a record, set, or name is/is not present\n" +
            "\t-r <name> [ttl] [class] <type> <data ...> \n" +
            "\t-s <name> <type> \n" +
            "\t-n <name> \n\n" +
            " add\t\t" +
            "specify a record to be added\n" +
            "\t[-r] <name> [ttl] [class] <type> <data ...> \n\n" +
            " delete\t" +
            "specify a record, set, or all records at a name to be deleted\n" +
            "\t-r <name> [ttl] [class] <type> <data ...> \n" +
            "\t-s <name> <type> \n" +
            "\t-n <name> \n\n" +
            " glue\t" +
            "specify an additional record\n" +
            "\t[-r] <name> [ttl] [class] <type> <data ...> \n"
        );
    }

    static void
    helpOperations() {
        System.out.println("Operations:\n" +
            " help [topic]\t" +
            "this information\n" +
            " send\t\t" +
            "sends the update and resets the current query\n" +
            " quit\t\t" +
            "quits the program\n"
        );
    }

    /** Prints help for the given topic, or the topic list if null. */
    static void
    help(String topic) {
        if (topic != null) {
            if (topic.equalsIgnoreCase("resolver"))
                helpResolver();
            else if (topic.equalsIgnoreCase("attributes"))
                helpAttributes();
            else if (topic.equalsIgnoreCase("data"))
                helpData();
            else if (topic.equalsIgnoreCase("operations"))
                helpOperations();
            else
                System.out.println ("Topic " + topic + " unrecognized");
            return;
        }
        System.out.println("The help topics are:\n" +
            " Resolver\t" +
            "Properties of the resolver and DNS\n" +
            " Attributes\t" +
            "Properties of some/all records\n" +
            " Data\t" +
            "Prerequisites, updates, and additional records\n" +
            " Operations\t" +
            "Actions to be taken\n"
        );
    }

    static void
    usage() {
        System.out.println("Usage: update [@server]");
        System.exit(0);
    }

    /**
     * Entry point.  With no arguments the server defaults to localhost;
     * a single @server argument overrides it.
     */
    public static void
    main(String argv[]) throws IOException {
        String server = null;
        if (argv.length == 0)
            server = "localhost";
        else if (argv.length == 1 && argv[0].startsWith("@"))
            server = argv[0].substring(1);
        else
            usage();
        update u = new update(server);
    }
}
|
// Look at using quick-json library from Google as alternative
package com.microservices.rentaloffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.google.gson.Gson;
/**
 * Message payload describing a car-rental need.  Serialized to and from JSON
 * with Gson; accumulates the signatures of services that handled it, the
 * solutions they proposed, and a counter of how often it has been read.
 */
public class NeedPacket {

    /** Need type identifier included in every serialized message. */
    public static final String NEED = "car_rental_offer";

    private String id;
    private String userid;
    private Level level;
    private List<String> signs = new ArrayList<>();
    private int readCount = 0;
    private final List<Solution> solutions = new ArrayList<>();

    public NeedPacket() {
        id = IDProvider.getId();
    }

    /**
     * Appends the given signature and serializes the packet.
     *
     * @param sign signature of the service handling the packet
     * @return the packet as a JSON string
     */
    public String toJson(String sign) {
        signs.add(sign);
        return toJson();
    }

    /** Serializes all packet state into a flat JSON object. */
    private String toJson() {
        Map<String, Object> message = new HashMap<>();
        message.put("json_class", NeedPacket.class.getName());
        message.put("need", NEED);
        message.put("id", id);
        message.put("level", level);
        message.put("userid", userid);
        message.put("readCount", readCount);
        message.put("solutions", solutions);
        message.put("signs", signs);
        return new Gson().toJson(message);
    }

    public String getUserid() {
        return userid;
    }

    public Level getLevel() {
        return level;
    }

    public int getReadCount() {
        return readCount;
    }

    /** Increments the read counter; called each time the packet is consumed. */
    public void increaseReadCount() {
        readCount++;
    }

    /**
     * Reconstructs a packet from its JSON form.  Gson bypasses the
     * constructor, so the deserialized id is taken from the JSON.
     */
    public static NeedPacket fromJson(String json) {
        return new Gson().fromJson(json, NeedPacket.class);
    }

    public void proposeSolution(Solution solution) {
        solutions.add(solution);
    }

    public boolean hasNoSolutions() {
        // idiomatic replacement for solutions.size() == 0
        return solutions.isEmpty();
    }

    public boolean hasSolutions() {
        return !solutions.isEmpty();
    }

    public List<Solution> getSolutions() {
        // NOTE(review): returns the internal mutable list for backward
        // compatibility; callers may rely on mutating it.
        return solutions;
    }

    public void setUserid(String userid) {
        this.userid = userid;
    }

    public void setLevel(Level level) {
        this.level = level;
    }

    public String getId() {
        return id;
    }

    @Override
    public String toString() {
        return toJson();
    }
}
|
import java.net.*;
import java.io.*;
import java.util.*;
import org.xbill.DNS.*;
import org.xbill.DNS.utils.*;
/** @author Brian Wellington <bwelling@xbill.org> */
public class update {
Message query, response;
Resolver res;
String server = null;
Name zone = Name.root;
int defaultTTL;
short defaultClass = DClass.IN;
PrintStream log = null;
void
print(Object o) {
System.out.println(o);
if (log != null)
log.println(o);
}
public
update(InputStream in) throws IOException {
List inputs = new LinkedList();
List istreams = new LinkedList();
query = new Message();
query.getHeader().setOpcode(Opcode.UPDATE);
InputStreamReader isr = new InputStreamReader(in);
BufferedReader br = new BufferedReader(isr);
inputs.add(br);
istreams.add(in);
while (true) {
try {
String line = null;
do {
InputStream is;
is = (InputStream)istreams.get(0);
br = (BufferedReader)inputs.get(0);
if (is == System.in)
System.out.print("> ");
line = br.readLine();
if (line == null) {
br.close();
inputs.remove(0);
istreams.remove(0);
if (inputs.isEmpty())
return;
}
} while (line == null);
if (log != null)
log.println("> " + line);
if (line.length() == 0 || line.charAt(0) == '
continue;
/* Allows cut and paste from other update sessions */
if (line.charAt(0) == '>')
line = line.substring(1);
Tokenizer st = new Tokenizer(line);
Tokenizer.Token token = st.get();
if (token.isEOL())
continue;
String operation = token.value;
if (operation.equals("server")) {
server = st.getString();
res = new SimpleResolver(server);
token = st.get();
if (token.isString()) {
String portstr = token.value;
res.setPort(Short.parseShort(portstr));
}
}
else if (operation.equals("key")) {
String keyname = st.getString();
String keydata = st.getString();
if (res == null)
res = new SimpleResolver(server);
res.setTSIGKey(keyname, keydata);
}
else if (operation.equals("edns")) {
if (res == null)
res = new SimpleResolver(server);
res.setEDNS(st.getUInt16());
}
else if (operation.equals("port")) {
if (res == null)
res = new SimpleResolver(server);
res.setPort(st.getUInt16());
}
else if (operation.equals("tcp")) {
if (res == null)
res = new SimpleResolver(server);
res.setTCP(true);
}
else if (operation.equals("class")) {
short newClass = DClass.value(st.getString());
if (newClass > 0)
defaultClass = newClass;
else
print("Invalid class " + newClass);
}
else if (operation.equals("ttl"))
defaultTTL = st.getTTL();
else if (operation.equals("origin") ||
operation.equals("zone"))
{
zone = st.getName(Name.root);
}
else if (operation.equals("require"))
doRequire(st);
else if (operation.equals("prohibit"))
doProhibit(st);
else if (operation.equals("add"))
doAdd(st);
else if (operation.equals("delete"))
doDelete(st);
else if (operation.equals("glue"))
doGlue(st);
else if (operation.equals("help") ||
operation.equals("?"))
{
token = st.get();
if (token.isString())
help(token.value);
else
help(null);
}
else if (operation.equals("echo"))
print(line.substring(4).trim());
else if (operation.equals("send")) {
sendUpdate();
query = new Message();
query.getHeader().setOpcode(Opcode.UPDATE);
}
else if (operation.equals("show")) {
print(query);
}
else if (operation.equals("clear")) {
query = new Message();
query.getHeader().setOpcode(Opcode.UPDATE);
}
else if (operation.equals("query"))
doQuery(st);
else if (operation.equals("quit") ||
operation.equals("q"))
{
if (log != null)
log.close();
Iterator it = inputs.iterator();
while (it.hasNext()) {
BufferedReader tbr;
tbr = (BufferedReader) it.next();
tbr.close();
}
System.exit(0);
}
else if (operation.equals("file"))
doFile(st, inputs, istreams);
else if (operation.equals("log"))
doLog(st);
else if (operation.equals("assert")) {
if (doAssert(st) == false)
return;
}
else if (operation.equals("sleep")) {
long interval = st.getUInt32();
try {
Thread.sleep(interval);
}
catch (InterruptedException e) {
}
}
else if (operation.equals("date")) {
Date now = new Date();
token = st.get();
if (token.isString() &&
token.value.equals("-ms"))
print(Long.toString(now.getTime()));
else
print(now);
}
else
print("invalid keyword: " + operation);
}
catch (TextParseException tpe) {
System.out.println(tpe.getMessage());
}
catch (NullPointerException npe) {
System.out.println("Parse error");
}
catch (InterruptedIOException iioe) {
System.out.println("Operation timed out");
}
catch (SocketException se) {
System.out.println("Socket error");
}
catch (IOException ioe) {
System.out.println(ioe);
}
}
}
void
sendUpdate() throws IOException {
if (query.getHeader().getCount(Section.UPDATE) == 0) {
print("Empty update message. Ignoring.");
return;
}
if (query.getHeader().getCount(Section.ZONE) == 0) {
Name updzone;
updzone = zone;
short dclass = defaultClass;
if (updzone == null) {
Record [] recs = query.getSectionArray(Section.UPDATE);
for (int i = 0; i < recs.length; i++) {
if (updzone == null)
updzone = new Name(recs[i].getName(),
1);
if (recs[i].getDClass() != DClass.NONE &&
recs[i].getDClass() != DClass.ANY)
{
dclass = recs[i].getDClass();
break;
}
}
}
Record soa = Record.newRecord(updzone, Type.SOA, dclass);
query.addRecord(soa, Section.ZONE);
}
if (res == null)
res = new SimpleResolver(server);
response = res.send(query);
print(response);
}
/*
* <name> [ttl] [class] <type> <data>
* Ignore the class, if present.
*/
Record
parseRR(Tokenizer st, short classValue, int TTLValue)
throws IOException
{
Name name = st.getName(zone);
int ttl;
short type;
Record record;
String s = st.getString();
try {
ttl = TTL.parseTTL(s);
s = st.getString();
}
catch (NumberFormatException e) {
ttl = TTLValue;
}
if (DClass.value(s) >= 0) {
classValue = DClass.value(s);
s = st.getString();
}
if ((type = Type.value(s)) < 0)
throw new IOException("Invalid type: " + s);
record = Record.fromString(name, type, classValue, ttl, st, zone);
if (record != null)
return (record);
else
throw new IOException("Parse error");
}
void
doRequire(Tokenizer st) throws IOException {
Tokenizer.Token token;
Name name;
Record record;
short type;
short dclass;
name = st.getName(zone);
token = st.get();
if (token.isString()) {
if ((type = Type.value(token.value)) < 0)
throw new IOException("Invalid type: " + token.value);
token = st.get();
boolean iseol = token.isEOL();
st.unget();
if (!iseol) {
record = Record.fromString(name, type, defaultClass,
0, st, zone);
} else
record = Record.newRecord(name, type, DClass.ANY, 0);
} else
record = Record.newRecord(name, Type.ANY, DClass.ANY, 0);
query.addRecord(record, Section.PREREQ);
print(record);
}
void
doProhibit(Tokenizer st) throws IOException {
Tokenizer.Token token;
String s;
Name name;
Record record;
short type;
name = st.getName(zone);
token = st.get();
if (token.isString()) {
if ((type = Type.value(token.value)) < 0)
throw new IOException("Invalid type: " + token.value);
} else
type = Type.ANY;
record = Record.newRecord(name, type, DClass.NONE, 0);
query.addRecord(record, Section.PREREQ);
print(record);
}
void
doAdd(Tokenizer st) throws IOException {
Record record = parseRR(st, defaultClass, defaultTTL);
query.addRecord(record, Section.UPDATE);
print(record);
}
void
doDelete(Tokenizer st) throws IOException {
Tokenizer.Token token;
String s;
Name name;
Record record;
short type;
short dclass;
name = st.getName(zone);
token = st.get();
if (token.isString()) {
s = token.value;
if ((dclass = DClass.value(s)) >= 0) {
s = st.getString();
}
if ((type = Type.value(s)) < 0)
throw new IOException("Invalid type: " + s);
token = st.get();
boolean iseol = token.isEOL();
st.unget();
if (!iseol) {
record = Record.fromString(name, type, DClass.NONE,
0, st, zone);
} else
record = Record.newRecord(name, type, DClass.ANY, 0);
}
else
record = Record.newRecord(name, Type.ANY, DClass.ANY, 0);
query.addRecord(record, Section.UPDATE);
print(record);
}
void
doGlue(Tokenizer st) throws IOException {
Record record = parseRR(st, defaultClass, defaultTTL);
query.addRecord(record, Section.ADDITIONAL);
print(record);
}
void
doQuery(Tokenizer st) throws IOException {
Record rec;
Tokenizer.Token token;
Name name = null;
short type = Type.A, dclass = defaultClass;
name = st.getName(zone);
token = st.get();
if (token.isString()) {
type = Type.value(token.value);
if (type < 0)
throw new IOException("Invalid type");
token = st.get();
if (token.isString()) {
dclass = DClass.value(token.value);
if (dclass < 0)
throw new IOException("Invalid class");
}
}
rec = Record.newRecord(name, type, dclass);
Message newQuery = Message.newQuery(rec);
if (res == null)
res = new SimpleResolver(server);
response = res.send(newQuery);
print(response);
}
void
doFile(Tokenizer st, List inputs, List istreams) throws IOException {
String s = st.getString();
InputStream is;
try {
if (s.equals("-"))
is = System.in;
else
is = new FileInputStream(s);
istreams.add(0, is);
inputs.add(new BufferedReader(new InputStreamReader(is)));
}
catch (FileNotFoundException e) {
print(s + " not found");
}
}
void
doLog(Tokenizer st) throws IOException {
String s = st.getString();
try {
FileOutputStream fos = new FileOutputStream(s);
log = new PrintStream(fos);
}
catch (Exception e) {
print("Error opening " + s);
}
}
boolean
doAssert(Tokenizer st) throws IOException {
String field = st.getString();
String expected = st.getString();
String value = null;
boolean flag = true;
int section;
if (response == null) {
print("No response has been received");
return true;
}
if (field.equalsIgnoreCase("rcode")) {
short rcode = response.getHeader().getRcode();
if (rcode != Rcode.value(expected)) {
value = Rcode.string(rcode);
flag = false;
}
}
else if (field.equalsIgnoreCase("serial")) {
Record [] answers = response.getSectionArray(Section.ANSWER);
if (answers.length < 1 || !(answers[0] instanceof SOARecord))
print("Invalid response (no SOA)");
else {
SOARecord soa = (SOARecord) answers[0];
int serial = soa.getSerial();
if (serial != Integer.parseInt(expected)) {
value = new Integer(serial).toString();
flag = false;
}
}
}
else if (field.equalsIgnoreCase("tsig")) {
if (response.isSigned()) {
if (response.isVerified())
value = "ok";
else
value = "failed";
}
else
value = "unsigned";
if (!value.equalsIgnoreCase(expected))
flag = false;
}
else if ((section = Section.value(field)) >= 0) {
int count = response.getHeader().getCount(section);
if (count != Integer.parseInt(expected)) {
value = new Integer(count).toString();
flag = false;
}
}
else
print("Invalid assertion keyword: " + field);
if (flag == false) {
print("Expected " + field + " " + expected +
", received " + value);
while (true) {
Tokenizer.Token token = st.get();
if (!token.isString())
break;
print(token.value);
}
st.unget();
}
return flag;
}
static void
help(String topic) {
System.out.println();
if (topic == null)
System.out.println("The following are supported commands:\n" +
"add assert class clear date delete\n" +
"echo file glue help log key\n" +
"edns origin port prohibit query quit\n" +
"require send server show sleep tcp\n" +
"ttl zone
else if (topic.equalsIgnoreCase("add"))
System.out.println(
"add <name> [ttl] [class] <type> <data>\n\n" +
"specify a record to be added\n");
else if (topic.equalsIgnoreCase("assert"))
System.out.println(
"assert <field> <value> [msg]\n\n" +
"asserts that the value of the field in the last\n" +
"response matches the value specified. If not,\n" +
"the message is printed (if present) and the\n" +
"program exits. The field may be any of <rcode>,\n" +
"<serial>, <tsig>, <qu>, <an>, <au>, or <ad>.\n");
else if (topic.equalsIgnoreCase("class"))
System.out.println(
"class <class>\n\n" +
"class of the zone to be updated (default: IN)\n");
else if (topic.equalsIgnoreCase("clear"))
System.out.println(
"clear\n\n" +
"clears the current update packet\n");
else if (topic.equalsIgnoreCase("date"))
System.out.println(
"date [-ms]\n\n" +
"prints the current date and time in human readable\n" +
"format or as the number of milliseconds since the\n" +
"epoch");
else if (topic.equalsIgnoreCase("delete"))
System.out.println(
"delete <name> [ttl] [class] <type> <data> \n" +
"delete <name> <type> \n" +
"delete <name>\n\n" +
"specify a record or set to be deleted, or that\n" +
"all records at a name should be deleted\n");
else if (topic.equalsIgnoreCase("echo"))
System.out.println(
"echo <text>\n\n" +
"prints the text\n");
else if (topic.equalsIgnoreCase("file"))
System.out.println(
"file <file>\n\n" +
"opens the specified file as the new input source\n" +
"(- represents stdin)\n");
else if (topic.equalsIgnoreCase("glue"))
System.out.println(
"glue <name> [ttl] [class] <type> <data>\n\n" +
"specify an additional record\n");
else if (topic.equalsIgnoreCase("help"))
System.out.println(
"?/help\n" +
"help [topic]\n\n" +
"prints a list of commands or help about a specific\n" +
"command\n");
else if (topic.equalsIgnoreCase("log"))
System.out.println(
"log <file>\n\n" +
"opens the specified file and uses it to log output\n");
else if (topic.equalsIgnoreCase("key"))
System.out.println(
"key <name> <data>\n\n" +
"TSIG key used to sign messages\n");
else if (topic.equalsIgnoreCase("edns"))
System.out.println(
"edns <level>\n\n" +
"EDNS level specified when sending messages\n");
else if (topic.equalsIgnoreCase("origin"))
System.out.println(
"origin <origin>\n\n" +
"<same as zone>\n");
else if (topic.equalsIgnoreCase("port"))
System.out.println(
"port <port>\n\n" +
"UDP/TCP port messages are sent to (default: 53)\n");
else if (topic.equalsIgnoreCase("prohibit"))
System.out.println(
"prohibit <name> <type> \n" +
"prohibit <name>\n\n" +
"require that a set or name is not present\n");
else if (topic.equalsIgnoreCase("query"))
System.out.println(
"query <name> [type [class]] \n\n" +
"issues a query\n");
else if (topic.equalsIgnoreCase("q") ||
topic.equalsIgnoreCase("quit"))
System.out.println(
"q/quit\n\n" +
"quits the program\n");
else if (topic.equalsIgnoreCase("require"))
System.out.println(
"require <name> [ttl] [class] <type> <data> \n" +
"require <name> <type> \n" +
"require <name>\n\n" +
"require that a record, set, or name is present\n");
else if (topic.equalsIgnoreCase("send"))
System.out.println(
"send\n\n" +
"sends and resets the current update packet\n");
else if (topic.equalsIgnoreCase("server"))
System.out.println(
"server <name> [port]\n\n" +
"server that receives send updates/queries\n");
else if (topic.equalsIgnoreCase("show"))
System.out.println(
"show\n\n" +
"shows the current update packet\n");
else if (topic.equalsIgnoreCase("sleep"))
System.out.println(
"sleep <milliseconds>\n\n" +
"pause for interval before next command\n");
else if (topic.equalsIgnoreCase("tcp"))
System.out.println(
"tcp\n\n" +
"TCP should be used to send all messages\n");
else if (topic.equalsIgnoreCase("ttl"))
System.out.println(
"ttl <ttl>\n\n" +
"default ttl of added records (default: 0)\n");
else if (topic.equalsIgnoreCase("zone"))
System.out.println(
"zone <zone>\n\n" +
"zone to update (default: .\n");
else if (topic.equalsIgnoreCase("
System.out.println(
"# <text>\n\n" +
"a comment\n");
else
System.out.println ("Topic '" + topic + "' unrecognized\n");
}
public static void
main(String args[]) throws IOException {
InputStream in = null;
if (args.length >= 1) {
try {
in = new FileInputStream(args[0]);
}
catch (FileNotFoundException e) {
System.out.println(args[0] + " not found.");
System.exit(1);
}
}
else
in = System.in;
update u = new update(in);
}
}
|
package org.broadinstitute.sting.gatk;
import net.sf.picard.reference.IndexedFastaSequenceFile;
import net.sf.picard.reference.ReferenceSequenceFile;
import net.sf.samtools.*;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.commandline.ArgumentException;
import org.broadinstitute.sting.commandline.ArgumentSource;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broadinstitute.sting.commandline.CommandLineUtils;
import org.broadinstitute.sting.commandline.ParsingEngine;
import org.broadinstitute.sting.commandline.Tags;
import org.broadinstitute.sting.gatk.arguments.GATKArgumentCollection;
import org.broadinstitute.sting.gatk.arguments.ValidationExclusion;
import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID;
import org.broadinstitute.sting.gatk.datasources.reads.Shard;
import org.broadinstitute.sting.gatk.datasources.reference.ReferenceDataSource;
import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource;
import org.broadinstitute.sting.gatk.datasources.sample.Sample;
import org.broadinstitute.sting.gatk.datasources.sample.SampleDataSource;
import org.broadinstitute.sting.gatk.datasources.reads.MonolithicShardStrategy;
import org.broadinstitute.sting.gatk.datasources.reads.ShardStrategy;
import org.broadinstitute.sting.gatk.datasources.reads.ShardStrategyFactory;
import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource;
import org.broadinstitute.sting.gatk.executive.MicroScheduler;
import org.broadinstitute.sting.gatk.filters.FilterManager;
import org.broadinstitute.sting.gatk.filters.ReadFilter;
import org.broadinstitute.sting.gatk.filters.ReadGroupBlackListFilter;
import org.broadinstitute.sting.gatk.io.OutputTracker;
import org.broadinstitute.sting.gatk.io.stubs.Stub;
import org.broadinstitute.sting.gatk.refdata.tracks.RMDTrack;
import org.broadinstitute.sting.gatk.refdata.tracks.builders.RMDTrackBuilder;
import org.broadinstitute.sting.gatk.refdata.utils.RMDIntervalGenerator;
import org.broadinstitute.sting.gatk.refdata.utils.RMDTriplet;
import org.broadinstitute.sting.gatk.walkers.*;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.GenomeLocSortedSet;
import org.broadinstitute.sting.utils.SequenceDictionaryUtils;
import org.broadinstitute.sting.utils.baq.BAQ;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.interval.IntervalUtils;
import java.io.File;
import java.util.*;
/**
* A GenomeAnalysisEngine that runs a specified walker.
*/
public class GenomeAnalysisEngine {
/**
* our log, which we want to capture anything from this class
*/
private static Logger logger = Logger.getLogger(GenomeAnalysisEngine.class);
/**
* The GATK command-line argument parsing code.
*/
private ParsingEngine parsingEngine;
/**
* The genomeLocParser can create and parse GenomeLocs.
*/
private GenomeLocParser genomeLocParser;
/**
* Accessor for sharded read data.
*/
private SAMDataSource readsDataSource = null;
/**
* Accessor for sharded reference data.
*/
private ReferenceDataSource referenceDataSource = null;
/**
* Accessor for sample metadata
*/
private SampleDataSource sampleDataSource = null;
/**
* Accessor for sharded reference-ordered data.
*/
private List<ReferenceOrderedDataSource> rodDataSources;
// our argument collection
private GATKArgumentCollection argCollection;
/**
* Collection of intervals used by the engine.
*/
private GenomeLocSortedSet intervals = null;
/**
 * Explicitly assign the interval set to use for this traversal (for unit
 * testing purposes); bypasses the engine's own interval initialization.
 * @param intervals set of intervals to use for this traversal
 */
public void setIntervals( GenomeLocSortedSet intervals ) {
this.intervals = intervals;
}
/**
* Collection of inputs used by the engine.
*/
private Map<ArgumentSource, Object> inputs = new HashMap<ArgumentSource, Object>();
/**
* Collection of outputs used by the engine.
*/
private Collection<Stub<?>> outputs = new ArrayList<Stub<?>>();
/**
* Collection of the filters applied to the input data.
*/
private Collection<ReadFilter> filters;
/**
* A currently hacky unique name for this GATK instance
*/
private String myName = "GATK_" + Math.abs(getRandomGenerator().nextInt());
/**
* our walker manager
*/
private final WalkerManager walkerManager = new WalkerManager();
// The walker to run; execute() throws if this has not been set.
private Walker<?, ?> walker;
/**
 * Sets the walker that this engine will run.
 * @param walker walker instance; must be non-null by the time execute() is called
 */
public void setWalker(Walker<?, ?> walker) {
this.walker = walker;
}
/**
 * A processed collection of SAM reader identifiers.  Defaults to an empty
 * list so traversals without reads are well-defined.
 */
private Collection<SAMReaderID> samReaderIDs = Collections.emptyList();
/**
 * Set the SAM/BAM files over which to traverse.
 * @param samReaderIDs Collection of ids to use during this traversal.
 */
public void setSAMFileIDs(Collection<SAMReaderID> samReaderIDs) {
this.samReaderIDs = samReaderIDs;
}
/**
 * Collection of reference metadata files over which to traverse.
 */
private Collection<RMDTriplet> referenceMetaDataFiles;
/**
 * Set the reference metadata files to use for this traversal.
 * @param referenceMetaDataFiles Collection of files and descriptors over which to traverse.
 */
public void setReferenceMetaDataFiles(Collection<RMDTriplet> referenceMetaDataFiles) {
this.referenceMetaDataFiles = referenceMetaDataFiles;
}
/**
 * Static random number generator and seed.  The fixed seed keeps runs
 * deterministic unless nonDeterministicRandomSeed reseeds it (see execute()).
 * NOTE(review): java.util.Random shared via a static accessor — assumes
 * single-threaded access; confirm before use from concurrent code.
 */
private static final long GATK_RANDOM_SEED = 47382911L;
private static Random randomGenerator = new Random(GATK_RANDOM_SEED);
/** Returns the engine-wide shared random generator. */
public static Random getRandomGenerator() { return randomGenerator; }
/** Restores the generator to the fixed default seed (deterministic runs). */
public static void resetRandomGenerator() { randomGenerator.setSeed(GATK_RANDOM_SEED); }
/** Reseeds the generator, e.g. with the current time for non-deterministic runs. */
public static void resetRandomGenerator(long seed) { randomGenerator.setSeed(seed); }
    /**
     * Actually run the GATK with the specified walker.
     *
     * @return the value of this traversal.
     */
    public Object execute() {
        //HeapSizeMonitor monitor = new HeapSizeMonitor();
        //monitor.start();
        setStartTime(new java.util.Date());
        // validate our parameters: the argument collection must have been set
        if (this.getArguments() == null) {
            throw new ReviewedStingException("The GATKArgumentCollection passed to GenomeAnalysisEngine can not be null.");
        }
        // the walker must have been set before execution
        if (this.walker == null)
            throw new ReviewedStingException("The walker passed to GenomeAnalysisEngine can not be null.");
        // opt-in nondeterminism: reseed the shared RNG from the wall clock
        if (this.getArguments().nonDeterministicRandomSeed)
            resetRandomGenerator(System.currentTimeMillis());
        // Prepare the data for traversal.
        initializeDataSources();
        // initialize and validate the interval list
        initializeIntervals();
        validateSuppliedIntervals();
        // our microscheduler, which is in charge of running everything
        MicroScheduler microScheduler = createMicroscheduler();
        // create temp directories as necessary
        initializeTempDirectory();
        // create the output streams
        initializeOutputStreams(microScheduler.getOutputTracker());
        ShardStrategy shardStrategy = getShardStrategy(readsDataSource,microScheduler.getReference(),intervals);
        // execute the microscheduler, storing the results
        Object result = microScheduler.execute(this.walker, shardStrategy);
        //monitor.stop();
        //logger.info(String.format("Maximum heap size consumed: %d",monitor.getMaxMemoryUsed()));
        return result;
    }
    /**
     * Retrieves an instance of the walker based on the walker name.
     *
     * @param walkerName Name of the walker.  Must not be null.  If the walker cannot be instantiated, an exception will be thrown.
     * @return An instance of the walker.
     */
    public Walker<?, ?> getWalkerByName(String walkerName) {
        return walkerManager.createByName(walkerName);
    }
    /**
     * Gets the name of a given walker type.
     * @param walkerType Type of walker.
     * @return Name of the walker.
     */
    public String getWalkerName(Class<? extends Walker> walkerType) {
        return walkerManager.getName(walkerType);
    }
    /**
     * @return the (randomly suffixed) unique name of this GATK instance.
     */
    public String getName() {
        return myName;
    }
/**
* Gets a list of the filters to associate with the given walker. Will NOT initialize the engine with this filters;
* the caller must handle that directly.
* @return A collection of available filters.
*/
public Collection<ReadFilter> createFilters() {
Set<ReadFilter> filters = new HashSet<ReadFilter>();
filters.addAll(WalkerManager.getReadFilters(walker,this.getFilterManager()));
if (this.getArguments().readGroupBlackList != null && this.getArguments().readGroupBlackList.size() > 0)
filters.add(new ReadGroupBlackListFilter(this.getArguments().readGroupBlackList));
for(String filterName: this.getArguments().readFilters)
filters.add(this.getFilterManager().createByName(filterName));
return Collections.unmodifiableSet(filters);
}
    /**
     * Allow subclasses and others within this package direct access to the walker manager.
     * @return The walker manager used by this package.
     */
    protected WalkerManager getWalkerManager() {
        return walkerManager;
    }
    /**
     * setup a microscheduler
     *
     * @return a new microscheduler
     * @throws UserException.CommandLineException if a read-based walker was requested without a reference
     */
    private MicroScheduler createMicroscheduler() {
        // Temporarily require all walkers to have a reference, even if that reference is not conceptually necessary.
        if ((walker instanceof ReadWalker || walker instanceof DuplicateWalker || walker instanceof ReadPairWalker) &&
                this.getArguments().referenceFile == null) {
            throw new UserException.CommandLineException("Read-based traversals require a reference file but none was given");
        }
        return MicroScheduler.create(this,walker,this.getReadsDataSource(),this.getReferenceDataSource().getReference(),this.getRodDataSources(),this.getArguments().numberOfThreads);
    }
protected DownsamplingMethod getDownsamplingMethod() {
GATKArgumentCollection argCollection = this.getArguments();
DownsamplingMethod method;
if(argCollection.getDownsamplingMethod() != null)
method = argCollection.getDownsamplingMethod();
else if(WalkerManager.getDownsamplingMethod(walker) != null)
method = WalkerManager.getDownsamplingMethod(walker);
else
method = argCollection.getDefaultDownsamplingMethod();
return method;
}
    /** @return the BAQ quality mode declared by the current walker. */
    public BAQ.QualityMode getWalkerBAQQualityMode() { return WalkerManager.getBAQQualityMode(walker); }
    /** @return when (if ever) the current walker wants BAQ applied. */
    public BAQ.ApplicationTime getWalkerBAQApplicationTime() { return WalkerManager.getBAQApplicationTime(walker); }
    /** @return whether the current walker wants extended (indel) pileup events generated. */
    protected boolean generateExtendedEvents() {
        return walker.generateExtendedEvents();
    }
    /** @return whether the current walker wants reads with deletions included at loci. */
    protected boolean includeReadsWithDeletionAtLoci() {
        return walker.includeReadsWithDeletionAtLoci();
    }
/**
* Verifies that the supplied set of reads files mesh with what the walker says it requires,
* and also makes sure that there were no duplicate SAM files specified on the command line.
*/
protected void validateSuppliedReads() {
GATKArgumentCollection arguments = this.getArguments();
// Check what the walker says is required against what was provided on the command line.
if (WalkerManager.isRequired(walker, DataSource.READS) && (arguments.samFiles == null || arguments.samFiles.size() == 0))
throw new ArgumentException("Walker requires reads but none were provided.");
// Check what the walker says is allowed against what was provided on the command line.
if ((arguments.samFiles != null && arguments.samFiles.size() > 0) && !WalkerManager.isAllowed(walker, DataSource.READS))
throw new ArgumentException("Walker does not allow reads but reads were provided.");
// Make sure no SAM files were specified multiple times by the user.
checkForDuplicateSamFiles();
}
/**
* Checks whether there are SAM files that appear multiple times in the fully unpacked list of
* SAM files (samReaderIDs). If there are, throws an ArgumentException listing the files in question.
*/
protected void checkForDuplicateSamFiles() {
Set<SAMReaderID> encounteredSamFiles = new HashSet<SAMReaderID>();
Set<String> duplicateSamFiles = new LinkedHashSet<String>();
for ( SAMReaderID samFile : samReaderIDs ) {
if ( encounteredSamFiles.contains(samFile) ) {
duplicateSamFiles.add(samFile.getSamFilePath());
}
else {
encounteredSamFiles.add(samFile);
}
}
if ( duplicateSamFiles.size() > 0 ) {
throw new ArgumentException("The following BAM files appear multiple times in the list of input files: " +
duplicateSamFiles + " BAM files may be specified at most once.");
}
}
    /**
     * Verifies that the supplied reference file mesh with what the walker says it requires.
     * @throws ArgumentException if a reference is missing or supplied but disallowed
     */
    protected void validateSuppliedReference() {
        GATKArgumentCollection arguments = this.getArguments();
        // Check what the walker says is required against what was provided on the command line.
        // TODO: Temporarily disabling WalkerManager.isRequired check on the reference because the reference is always required.
        if (/*WalkerManager.isRequired(walker, DataSource.REFERENCE) &&*/ arguments.referenceFile == null)
            throw new ArgumentException("Walker requires a reference but none was provided.");
        // Check what the walker says is allowed against what was provided on the command line.
        if (arguments.referenceFile != null && !WalkerManager.isAllowed(walker, DataSource.REFERENCE))
            throw new ArgumentException("Walker does not allow a reference but one was provided.");
    }
/**
* Verifies that all required reference-ordered data has been supplied, and any reference-ordered data that was not
* 'allowed' is still present.
*
* @param rods Reference-ordered data to load.
*/
protected void validateSuppliedReferenceOrderedData(List<ReferenceOrderedDataSource> rods) {
// Check to make sure that all required metadata is present.
List<RMD> allRequired = WalkerManager.getRequiredMetaData(walker);
for (RMD required : allRequired) {
boolean found = false;
for (ReferenceOrderedDataSource rod : rods) {
if (rod.matchesNameAndRecordType(required.name(), required.type()))
found = true;
}
if (!found)
throw new ArgumentException(String.format("Walker requires reference metadata to be supplied named '%s' of type '%s', but this metadata was not provided. " +
"Please supply the specified metadata file.", required.name(), required.type().getSimpleName()));
}
// Check to see that no forbidden rods are present.
for (ReferenceOrderedDataSource rod : rods) {
if (!WalkerManager.isAllowed(walker, rod))
throw new ArgumentException(String.format("Walker of type %s does not allow access to metadata: %s", walker.getClass(), rod.getName()));
}
}
    /**
     * Validates the interval set computed by initializeIntervals() against the walker type.
     * @throws ArgumentException if unmapped intervals are used with a non-read walker, or the interval set is empty
     */
    protected void validateSuppliedIntervals() {
        // Only read walkers support '-L unmapped' intervals.  Trap and validate any other instances of -L unmapped.
        if(!(walker instanceof ReadWalker)) {
            GenomeLocSortedSet intervals = getIntervals();
            if(intervals != null && getIntervals().contains(GenomeLoc.UNMAPPED))
                throw new ArgumentException("Interval list specifies unmapped region.  Only read walkers may include the unmapped region.");
        }
        // If intervals is non-null and empty at this point, it means that the list of intervals to process
        // was filtered down to an empty set (eg., the user specified something like -L chr1 -XL chr1).  Since
        // this was very likely unintentional, the user should be informed of this.  Note that this is different
        // from the case where intervals == null, which indicates either that there were no interval arguments,
        // or that -L all was specified.
        if ( intervals != null && intervals.isEmpty() ) {
            throw new ArgumentException("The given combination of -L and -XL options results in an empty set.  " +
                    "No intervals to process.");
        }
    }
    /**
     * Get the sharding strategy given a driving data source.
     *
     * @param readsDataSource Reads data source; may be null when no reads were supplied.
     * @param drivingDataSource Data on which to shard.
     * @param intervals Intervals to process; null means traverse everything.
     * @return the sharding strategy
     */
    protected ShardStrategy getShardStrategy(SAMDataSource readsDataSource, ReferenceSequenceFile drivingDataSource, GenomeLocSortedSet intervals) {
        ValidationExclusion exclusions = (readsDataSource != null ? readsDataSource.getReadsInfo().getValidationExclusionList() : null);
        ReferenceDataSource referenceDataSource = this.getReferenceDataSource();
        // Use monolithic sharding if no index is present.  Monolithic sharding is always required for the original
        // sharding system; it's required with the new sharding system only for locus walkers.
        if(readsDataSource != null && !readsDataSource.hasIndex() ) {
            // Unindexed BAMs are only allowed in --unsafe mode, and never with intervals.
            if(!exclusions.contains(ValidationExclusion.TYPE.ALLOW_UNINDEXED_BAM))
                throw new UserException.CommandLineException("Cannot process the provided BAM file(s) because they were not indexed.  The GATK does offer limited processing of unindexed BAMs in --unsafe mode, but this GATK feature is currently unsupported.");
            if(intervals != null && !argCollection.allowIntervalsWithUnindexedBAM)
                throw new UserException.CommandLineException("Cannot perform interval processing when reads are present but no index is available.");
            Shard.ShardType shardType;
            if(walker instanceof LocusWalker) {
                if (readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate)
                    throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Locus walkers can only traverse coordinate-sorted data.  Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately.");
                shardType = Shard.ShardType.LOCUS;
            }
            else if(walker instanceof ReadWalker || walker instanceof DuplicateWalker || walker instanceof ReadPairWalker)
                shardType = Shard.ShardType.READ;
            else
                throw new UserException.CommandLineException("The GATK cannot currently process unindexed BAM files");
            // The monolithic shard covers either the supplied intervals or every contig in the dictionary.
            List<GenomeLoc> region;
            if(intervals != null)
                region = intervals.toList();
            else {
                region = new ArrayList<GenomeLoc>();
                for(SAMSequenceRecord sequenceRecord: drivingDataSource.getSequenceDictionary().getSequences())
                    region.add(getGenomeLocParser().createGenomeLoc(sequenceRecord.getSequenceName(),1,sequenceRecord.getSequenceLength()));
            }
            return new MonolithicShardStrategy(getGenomeLocParser(), readsDataSource,shardType,region);
        }
        // Indexed case: pick a shatter strategy based on the walker type.
        ShardStrategy shardStrategy = null;
        ShardStrategyFactory.SHATTER_STRATEGY shardType;
        long SHARD_SIZE = 100000L;
        if (walker instanceof LocusWalker) {
            // RodWalkers touch no reads, so they can use much larger shards.
            if (walker instanceof RodWalker) SHARD_SIZE *= 1000;
            if (intervals != null && !intervals.isEmpty()) {
                if(!readsDataSource.isEmpty() && readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate)
                    throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Locus walkers can only traverse coordinate-sorted data.  Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately.");
                shardStrategy = ShardStrategyFactory.shatter(readsDataSource,
                        referenceDataSource.getReference(),
                        ShardStrategyFactory.SHATTER_STRATEGY.LOCUS_EXPERIMENTAL,
                        drivingDataSource.getSequenceDictionary(),
                        SHARD_SIZE,
                        getGenomeLocParser(),
                        intervals);
            } else
                shardStrategy = ShardStrategyFactory.shatter(readsDataSource,
                        referenceDataSource.getReference(),
                        ShardStrategyFactory.SHATTER_STRATEGY.LOCUS_EXPERIMENTAL,
                        drivingDataSource.getSequenceDictionary(),
                        SHARD_SIZE,getGenomeLocParser());
        } else if (walker instanceof ReadWalker ||
                walker instanceof DuplicateWalker) {
            shardType = ShardStrategyFactory.SHATTER_STRATEGY.READS_EXPERIMENTAL;
            if (intervals != null && !intervals.isEmpty()) {
                shardStrategy = ShardStrategyFactory.shatter(readsDataSource,
                        referenceDataSource.getReference(),
                        shardType,
                        drivingDataSource.getSequenceDictionary(),
                        SHARD_SIZE,
                        getGenomeLocParser(),
                        intervals);
            } else {
                shardStrategy = ShardStrategyFactory.shatter(readsDataSource,
                        referenceDataSource.getReference(),
                        shardType,
                        drivingDataSource.getSequenceDictionary(),
                        SHARD_SIZE,
                        getGenomeLocParser());
            }
        } else if (walker instanceof ReadPairWalker) {
            // Read pair traversal requires query-name sorting and cannot be combined with intervals.
            if(readsDataSource != null && readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.queryname)
                throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.queryname, "Read pair walkers can only walk over query name-sorted data.  Please resort your input BAM file.");
            if(intervals != null && !intervals.isEmpty())
                throw new UserException.CommandLineException("Pairs traversal cannot be used in conjunction with intervals.");
            shardStrategy = ShardStrategyFactory.shatter(readsDataSource,
                    referenceDataSource.getReference(),
                    ShardStrategyFactory.SHATTER_STRATEGY.READS_EXPERIMENTAL,
                    drivingDataSource.getSequenceDictionary(),
                    SHARD_SIZE,
                    getGenomeLocParser());
        } else
            throw new ReviewedStingException("Unable to support walker of type" + walker.getClass().getName());
        return shardStrategy;
    }
    /** @return whether flashback data should be recorded (only for read walkers). */
    protected boolean flashbackData() {
        return walker instanceof ReadWalker;
    }
    /**
     * Create the temp directory if it doesn't exist.
     * NOTE(review): the mkdirs() return value is ignored, so a failure to create
     * the directory is silent here and will only surface on first use.
     */
    private void initializeTempDirectory() {
        File tempDir = new File(System.getProperty("java.io.tmpdir"));
        tempDir.mkdirs();
    }
    /**
     * Initialize the output streams as specified by the user.
     *
     * @param outputTracker the tracker supplying the initialization data.
     */
    private void initializeOutputStreams(OutputTracker outputTracker) {
        // Register externally managed inputs with the tracker...
        for (Map.Entry<ArgumentSource, Object> input : getInputs().entrySet())
            outputTracker.addInput(input.getKey(), input.getValue());
        // ...and the output stubs...
        for (Stub<?> stub : getOutputs())
            outputTracker.addOutput(stub);
        // ...then let the tracker wire everything into the walker.
        outputTracker.prepareWalker(walker, getArguments().strictnessLevel);
    }
    /** @return the reference data source set by setReferenceDataSource(). */
    public ReferenceDataSource getReferenceDataSource() {
        return referenceDataSource;
    }
    /** @return the genome location parser built from the reference's sequence dictionary. */
    public GenomeLocParser getGenomeLocParser() {
        return genomeLocParser;
    }
    /**
     * Manage lists of filters.
     */
    private final FilterManager filterManager = new FilterManager();
    private Date startTime = null; // the start time for execution
    /**
     * Sets the parsing engine used to resolve argument tags.
     * @param parsingEngine the command-line parsing engine
     */
    public void setParser(ParsingEngine parsingEngine) {
        this.parsingEngine = parsingEngine;
    }
    /**
     * Explicitly set the GenomeLocParser, for unit testing.
     * @param genomeLocParser GenomeLocParser to use.
     */
    public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
        this.genomeLocParser = genomeLocParser;
    }
    /**
     * Sets the start time when the execute() function was last called
     * @param startTime the start time when the execute() function was last called
     */
    protected void setStartTime(Date startTime) {
        this.startTime = startTime;
    }
    /**
     * @return the start time when the execute() function was last called
     */
    public Date getStartTime() {
        return startTime;
    }
    /**
     * Setup the intervals to be processed.
     * Populates the {@code intervals} field from the -L/-XL/-BTI arguments; leaves it
     * null when no interval arguments were given or when '-L all' was specified.
     */
    protected void initializeIntervals() {
        // return if no interval arguments at all
        if ((argCollection.intervals == null) && (argCollection.excludeIntervals == null) && (argCollection.RODToInterval == null))
            return;
        // if '-L all' was specified, verify that it was the only -L specified and return if so.
        if(argCollection.intervals != null) {
            for(String interval: argCollection.intervals) {
                if(interval.trim().equals("all")) {
                    if(argCollection.intervals.size() > 1)
                        throw new UserException("'-L all' was specified along with other intervals or interval lists; the GATK cannot combine '-L all' with other intervals.");
                    // '-L all' was specified and seems valid.  Return.
                    return;
                }
            }
        }
        // if include argument isn't given, create new set of all possible intervals
        GenomeLocSortedSet includeSortedSet = (argCollection.intervals == null && argCollection.RODToInterval == null ?
                GenomeLocSortedSet.createSetFromSequenceDictionary(this.referenceDataSource.getReference().getSequenceDictionary()) :
                loadIntervals(argCollection.intervals, IntervalUtils.mergeIntervalLocations(getRODIntervals(), argCollection.intervalMerging)));
        // if no exclude arguments, can return parseIntervalArguments directly
        if (argCollection.excludeIntervals == null)
            intervals = includeSortedSet;
        // otherwise there are exclude arguments => must merge include and exclude GenomeLocSortedSets
        else {
            GenomeLocSortedSet excludeSortedSet = loadIntervals(argCollection.excludeIntervals, null);
            intervals = includeSortedSet.subtractRegions(excludeSortedSet);
            // logging messages only printed when exclude (-XL) arguments are given
            long toPruneSize = includeSortedSet.coveredSize();
            long toExcludeSize = excludeSortedSet.coveredSize();
            long intervalSize = intervals.coveredSize();
            logger.info(String.format("Initial include intervals span %d loci; exclude intervals span %d loci", toPruneSize, toExcludeSize));
            logger.info(String.format("Excluding %d loci from original intervals (%.2f%% reduction)",
                    toPruneSize - intervalSize, (toPruneSize - intervalSize) / (0.01 * toPruneSize)));
        }
    }
    /**
     * Loads the intervals relevant to the current execution
     * @param argList String representation of arguments; might include 'all', filenames, intervals in samtools
     *                notation, or a combination of the above
     * @param rodIntervals a list of ROD intervals to add to the returned set.  Can be empty or null.
     * @return A sorted, merged list of all intervals specified in this arg list.
     */
    protected GenomeLocSortedSet loadIntervals( List<String> argList, List<GenomeLoc> rodIntervals ) {
        // Empty interval lists are only tolerated in --unsafe modes.
        boolean allowEmptyIntervalList = (argCollection.unsafe == ValidationExclusion.TYPE.ALLOW_EMPTY_INTERVAL_LIST ||
                argCollection.unsafe == ValidationExclusion.TYPE.ALL);
        List<GenomeLoc> nonRODIntervals = IntervalUtils.parseIntervalArguments(genomeLocParser, argList, allowEmptyIntervalList);
        // Combine command-line and ROD-derived intervals per the BTI merge rule, then sort/merge.
        List<GenomeLoc> allIntervals = IntervalUtils.mergeListsBySetOperator(rodIntervals, nonRODIntervals, argCollection.BTIMergeRule);
        return IntervalUtils.sortAndMergeIntervals(genomeLocParser, allIntervals, argCollection.intervalMerging);
    }
/**
* if we have a ROD specified as a 'rodToIntervalTrackName', convert its records to RODs
* @return ROD intervals as GenomeLocs
*/
private List<GenomeLoc> getRODIntervals() {
Map<String, ReferenceOrderedDataSource> rodNames = RMDIntervalGenerator.getRMDTrackNames(rodDataSources);
// Do we have any RODs that overloaded as interval lists with the 'rodToIntervalTrackName' flag?
List<GenomeLoc> ret = new ArrayList<GenomeLoc>();
if (rodNames != null && argCollection.RODToInterval != null) {
String rodName = argCollection.RODToInterval;
// check to make sure we have a rod of that name
if (!rodNames.containsKey(rodName))
throw new UserException.CommandLineException("--rodToIntervalTrackName (-BTI) was passed the name '"+rodName+"', which wasn't given as a ROD name in the -B option");
for (String str : rodNames.keySet())
if (str.equals(rodName)) {
logger.info("Adding interval list from track (ROD) named " + rodName);
RMDIntervalGenerator intervalGenerator = new RMDIntervalGenerator(rodNames.get(str));
ret.addAll(intervalGenerator.toGenomeLocList());
}
}
return ret;
}
    /**
     * Add additional, externally managed IO streams for inputs.
     *
     * @param argumentSource Field into which to inject the value.
     * @param value Instance to inject.
     */
    public void addInput(ArgumentSource argumentSource, Object value) {
        inputs.put(argumentSource, value);
    }
    /**
     * Add additional, externally managed IO streams for output.
     *
     * @param stub Instance to inject.
     */
    public void addOutput(Stub<?> stub) {
        outputs.add(stub);
    }
    /**
     * Returns the tag associated with a given command-line argument.
     * @param key Object for which to inspect the tag.
     * @return Tags object associated with the given key, or an empty Tag structure if none are present.
     */
    public Tags getTags(Object key)  {
        return parsingEngine.getTags(key);
    }
protected void initializeDataSources() {
logger.info("Strictness is " + argCollection.strictnessLevel);
// TODO -- REMOVE ME
BAQ.DEFAULT_GOP = argCollection.BAQGOP;
validateSuppliedReference();
setReferenceDataSource(argCollection.referenceFile);
validateSuppliedReads();
readsDataSource = createReadsDataSource(argCollection,genomeLocParser,referenceDataSource.getReference());
sampleDataSource = new SampleDataSource(getSAMFileHeader(), argCollection.sampleFiles);
for (ReadFilter filter : filters)
filter.initialize(this);
sampleDataSource = new SampleDataSource(getSAMFileHeader(), argCollection.sampleFiles);
// set the sequence dictionary of all of Tribble tracks to the sequence dictionary of our reference
rodDataSources = getReferenceOrderedDataSources(referenceMetaDataFiles,referenceDataSource.getReference().getSequenceDictionary(),genomeLocParser,argCollection.unsafe);
}
    /**
     * Gets a unique identifier for the reader sourcing this read.
     * @param read Read to examine.
     * @return A unique identifier for the source file of this read.  Exception if not found.
     */
    public SAMReaderID getReaderIDForRead(final SAMRecord read) {
        return getReadsDataSource().getReaderID(read);
    }
    /**
     * Gets the source file for this read.
     * @param id Unique identifier determining which input file to use.
     * @return The source filename for this read.
     */
    public File getSourceFileForReaderID(final SAMReaderID id) {
        return getReadsDataSource().getSAMFile(id);
    }
/**
* Returns sets of samples present in the (merged) input SAM stream, grouped by readers (i.e. underlying
* individual bam files). For instance: if GATK is run with three input bam files (three -I arguments), then the list
* returned by this method will contain 3 elements (one for each reader), with each element being a set of sample names
* found in the corresponding bam file.
*
* @return Sets of samples in the merged input SAM stream, grouped by readers
*/
public List<Set<String>> getSamplesByReaders() {
Collection<SAMReaderID> readers = getReadsDataSource().getReaderIDs();
List<Set<String>> sample_sets = new ArrayList<Set<String>>(readers.size());
for (SAMReaderID r : readers) {
Set<String> samples = new HashSet<String>(1);
sample_sets.add(samples);
for (SAMReadGroupRecord g : getReadsDataSource().getHeader(r).getReadGroups()) {
samples.add(g.getSample());
}
}
return sample_sets;
}
/**
* Returns sets of libraries present in the (merged) input SAM stream, grouped by readers (i.e. underlying
* individual bam files). For instance: if GATK is run with three input bam files (three -I arguments), then the list
* returned by this method will contain 3 elements (one for each reader), with each element being a set of library names
* found in the corresponding bam file.
*
* @return Sets of libraries present in the (merged) input SAM stream, grouped by readers
*/
public List<Set<String>> getLibrariesByReaders() {
Collection<SAMReaderID> readers = getReadsDataSource().getReaderIDs();
List<Set<String>> lib_sets = new ArrayList<Set<String>>(readers.size());
for (SAMReaderID r : readers) {
Set<String> libs = new HashSet<String>(2);
lib_sets.add(libs);
for (SAMReadGroupRecord g : getReadsDataSource().getHeader(r).getReadGroups()) {
libs.add(g.getLibrary());
}
}
return lib_sets;
}
public List<Set<String>> getMergedReadGroupsByReaders() {
Collection<SAMReaderID> readers = getReadsDataSource().getReaderIDs();
List<Set<String>> rg_sets = new ArrayList<Set<String>>(readers.size());
for (SAMReaderID r : readers) {
Set<String> groups = new HashSet<String>(5);
rg_sets.add(groups);
for (SAMReadGroupRecord g : getReadsDataSource().getHeader(r).getReadGroups()) {
if (getReadsDataSource().hasReadGroupCollisions()) { // Check if there were read group clashes with hasGroupIdDuplicates and if so:
// use HeaderMerger to translate original read group id from the reader into the read group id in the
// merged stream, and save that remapped read group id to associate it with specific reader
groups.add(getReadsDataSource().getReadGroupId(r, g.getReadGroupId()));
} else {
// otherwise, pass through the unmapped read groups since this is what Picard does as well
groups.add(g.getReadGroupId());
}
}
}
return rg_sets;
}
    /**
     * Now that all files are open, validate the sequence dictionaries of the reads vs. the reference vrs the reference ordered data (if available).
     *
     * @param reads     Reads data source.
     * @param reference Reference data source.
     * @param rods    a collection of the reference ordered data tracks
     * @param manager track builder used to validate each track's sequence dictionary
     */
    private void validateSourcesAgainstReference(SAMDataSource reads, ReferenceSequenceFile reference, Collection<ReferenceOrderedDataSource> rods, RMDTrackBuilder manager) {
        // Nothing to validate when there are no reads/rods, or no reference to compare against.
        if ((reads.isEmpty() && (rods == null || rods.isEmpty())) || reference == null )
            return;
        // Compile a set of sequence names that exist in the reference file.
        SAMSequenceDictionary referenceDictionary = reference.getSequenceDictionary();
        if (!reads.isEmpty()) {
            // Compile a set of sequence names that exist in the BAM files.
            SAMSequenceDictionary readsDictionary = reads.getHeader().getSequenceDictionary();
            if (readsDictionary.size() == 0) {
                logger.info("Reads file is unmapped.  Skipping validation against reference.");
                return;
            }
            // compare the reads to the reference
            SequenceDictionaryUtils.validateDictionaries(logger, getArguments().unsafe, "reads", readsDictionary, "reference", referenceDictionary);
        }
        for (ReferenceOrderedDataSource rod : rods)
            manager.validateTrackSequenceDictionary(rod.getName(),rod.getSequenceDictionary(),referenceDictionary);
    }
    /**
     * Gets a data source for the given set of reads.
     *
     * @param argCollection   the engine's argument collection
     * @param genomeLocParser parser for creating genome locations
     * @param refReader       indexed reference reader, used for on-input BAQ
     * @return A data source for the given set of reads.
     * @throws UserException.BadArgumentValue if BAQ was requested but the walker forbids it
     */
    private SAMDataSource createReadsDataSource(GATKArgumentCollection argCollection, GenomeLocParser genomeLocParser, IndexedFastaSequenceFile refReader) {
        DownsamplingMethod method = getDownsamplingMethod();
        // Reject BAQ requests the walker cannot accept.
        if ( getWalkerBAQApplicationTime() == BAQ.ApplicationTime.FORBIDDEN && argCollection.BAQMode != BAQ.CalculationMode.OFF)
            throw new UserException.BadArgumentValue("baq", "Walker cannot accept BAQ'd base qualities, and yet BAQ mode " + argCollection.BAQMode + " was requested.");
        SAMDataSource dataSource = new SAMDataSource(
                samReaderIDs,
                genomeLocParser,
                argCollection.useOriginalBaseQualities,
                argCollection.strictnessLevel,
                argCollection.readBufferSize,
                method,
                new ValidationExclusion(Arrays.asList(argCollection.unsafe)),
                filters,
                includeReadsWithDeletionAtLoci(),
                generateExtendedEvents(),
                // BAQ is applied on input only when the walker asks for it there.
                getWalkerBAQApplicationTime() == BAQ.ApplicationTime.ON_INPUT ? argCollection.BAQMode : BAQ.CalculationMode.OFF,
                getWalkerBAQQualityMode(),
                refReader,
                argCollection.defaultBaseQualities,
                !argCollection.disableLowMemorySharding);
        return dataSource;
    }
    /**
     * Opens the reference sequence file (paired with an index) and initializes the
     * genome location parser from it.  Only public for testing purposes.
     *
     * @param refFile Handle to a reference sequence file.  Non-null.
     */
    public void setReferenceDataSource(File refFile) {
        this.referenceDataSource = new ReferenceDataSource(refFile);
        // The GenomeLocParser is derived from the reference and must be rebuilt with it.
        genomeLocParser = new GenomeLocParser(referenceDataSource.getReference());
    }
/**
* Open the reference-ordered data sources.
*
* @param referenceMetaDataFiles collection of RMD descriptors to load and validate.
* @param sequenceDictionary GATK-wide sequnce dictionary to use for validation.
* @param genomeLocParser to use when creating and validating GenomeLocs.
* @param validationExclusionType potentially indicate which validations to include / exclude.
*
* @return A list of reference-ordered data sources.
*/
private List<ReferenceOrderedDataSource> getReferenceOrderedDataSources(Collection<RMDTriplet> referenceMetaDataFiles,
SAMSequenceDictionary sequenceDictionary,
GenomeLocParser genomeLocParser,
ValidationExclusion.TYPE validationExclusionType) {
RMDTrackBuilder builder = new RMDTrackBuilder(sequenceDictionary,genomeLocParser,validationExclusionType);
// try and make the tracks given their requests
// create of live instances of the tracks
List<RMDTrack> tracks = new ArrayList<RMDTrack>();
List<ReferenceOrderedDataSource> dataSources = new ArrayList<ReferenceOrderedDataSource>();
for (RMDTriplet fileDescriptor : referenceMetaDataFiles)
dataSources.add(new ReferenceOrderedDataSource(fileDescriptor,
builder,
sequenceDictionary,
genomeLocParser,
flashbackData()));
// validation: check to make sure everything the walker needs is present, and that all sequence dictionaries match.
validateSuppliedReferenceOrderedData(dataSources);
validateSourcesAgainstReference(readsDataSource, referenceDataSource.getReference(), dataSources, builder);
return dataSources;
}
    /**
     * Returns the SAM File Header from the input reads' data source file
     * @return the SAM File Header from the input reads' data source file
     */
    public SAMFileHeader getSAMFileHeader() {
        return readsDataSource.getHeader();
    }
    /**
     * Returns the unmerged SAM file header for an individual reader.
     * @param reader The reader.
     * @return Header for that reader.
     */
    public SAMFileHeader getSAMFileHeader(SAMReaderID reader) {
        return readsDataSource.getHeader(reader);
    }
    /**
     * Returns data source object encapsulating all essential info and handlers used to traverse
     * reads; header merger, individual file readers etc can be accessed through the returned data source object.
     *
     * @return the reads data source
     */
    public SAMDataSource getReadsDataSource() {
        return this.readsDataSource;
    }
    /**
     * Sets the collection of GATK main application arguments.
     *
     * @param argCollection the GATK argument collection
     */
    public void setArguments(GATKArgumentCollection argCollection) {
        this.argCollection = argCollection;
    }
    /**
     * Gets the collection of GATK main application arguments.
     *
     * @return the GATK argument collection
     */
    public GATKArgumentCollection getArguments() {
        return this.argCollection;
    }
    /**
     * Get the list of intervals passed to the engine.
     * @return List of intervals, or null for a whole-genome traversal.
     */
    public GenomeLocSortedSet getIntervals() {
        return this.intervals;
    }
    /**
     * Gets the list of filters employed by this engine.
     * @return Collection of filters (actual instances) used by this engine.
     */
    public Collection<ReadFilter> getFilters() {
        return this.filters;
    }
    /**
     * Sets the list of filters employed by this engine.
     * @param filters Collection of filters (actual instances) used by this engine.
     */
    public void setFilters(Collection<ReadFilter> filters) {
        this.filters = filters;
    }
    /**
     * Gets the filter manager for this engine.
     * @return filter manager for this engine.
     */
    protected FilterManager getFilterManager() {
        return filterManager;
    }
    /**
     * Gets the input sources for this engine.
     * @return input sources for this engine.
     */
    protected Map<ArgumentSource, Object> getInputs() {
        return inputs;
    }
    /**
     * Gets the output stubs for this engine.
     * @return output stubs for this engine.
     */
    protected Collection<Stub<?>> getOutputs() {
        return outputs;
    }
    /**
     * Returns data source objects encapsulating all rod data;
     * individual rods can be accessed through the returned data source objects.
     *
     * @return the rods data sources
     */
    public List<ReferenceOrderedDataSource> getRodDataSources() {
        return this.rodDataSources;
    }
/**
* Gets cumulative metrics about the entire run to this point.
* Returns a clone of this snapshot in time.
* @return cumulative metrics about the entire run at this point. ReadMetrics object is a unique instance and is
* owned by the caller; the caller can do with the object what they wish.
*/
public ReadMetrics getCumulativeMetrics() {
return readsDataSource == null ? null : readsDataSource.getCumulativeReadMetrics();
}
/**
 * Gets the sample metadata data source backing all of the sample queries below.
 *
 * @return the engine's sample data source
 */
public SampleDataSource getSampleMetadata() {
    return this.sampleDataSource;
}

/**
 * Get a sample by its ID.
 * If an alias is passed in, return the main sample object.
 *
 * @param id sample id
 * @return sample Object with this ID
 */
public Sample getSampleById(String id) {
    return sampleDataSource.getSampleById(id);
}

/**
 * Get the sample for a given read group.
 * Must first look up ID for read group.
 *
 * @param readGroup of sample
 * @return sample object with ID from the read group
 */
public Sample getSampleByReadGroup(SAMReadGroupRecord readGroup) {
    return sampleDataSource.getSampleByReadGroup(readGroup);
}

/**
 * Get a sample for a given read.
 * Must first look up read group, and then sample ID for that read group.
 *
 * @param read of sample
 * @return sample object of this read
 */
public Sample getSampleByRead(SAMRecord read) {
    // Delegates through the read's read group to reuse the lookup above.
    return getSampleByReadGroup(read.getReadGroup());
}

/**
 * Get number of sample objects.
 *
 * @return size of samples map
 */
public int sampleCount() {
    return sampleDataSource.sampleCount();
}

/**
 * Return all samples with a given family ID.
 * Note that this isn't terribly efficient (linear) - it may be worth adding a new family ID data structure for this.
 *
 * @param familyId family ID
 * @return Samples with the given family ID
 */
public Set<Sample> getFamily(String familyId) {
    return sampleDataSource.getFamily(familyId);
}

/**
 * Returns all children of a given sample.
 * See note on the efficiency of getFamily() - since this depends on getFamily() it's also not efficient.
 *
 * @param sample parent sample
 * @return children of the given sample
 */
public Set<Sample> getChildren(Sample sample) {
    return sampleDataSource.getChildren(sample);
}

/**
 * Gets all the samples known to the sample data source.
 *
 * @return every sample tracked by this engine
 */
public Collection<Sample> getSamples() {
    return sampleDataSource.getSamples();
}

/**
 * Takes a list of sample names and returns their corresponding sample objects.
 *
 * @param sampleNameList List of sample names
 * @return Corresponding set of samples
 */
public Set<Sample> getSamples(Collection<String> sampleNameList) {
    return sampleDataSource.getSamples(sampleNameList);
}

/**
 * Returns a set of samples that have any value (which could be null) for a given property.
 *
 * @param key Property key
 * @return Set of samples with the property
 */
public Set<Sample> getSamplesWithProperty(String key) {
    return sampleDataSource.getSamplesWithProperty(key);
}

/**
 * Returns a set of samples that have a property with a certain value.
 * Value must be a string for now - could add a similar method for matching any objects in the future.
 *
 * @param key Property key
 * @param value String property value
 * @return Set of samples that match key and value
 */
public Set<Sample> getSamplesWithProperty(String key, String value) {
    return sampleDataSource.getSamplesWithProperty(key, value);
}
/**
 * Resolves the sample objects for every sample name referenced by a variant context.
 *
 * @param context Any variant context
 * @return the set of sample objects for the context's sample names
 */
public Set<Sample> getSamplesByVariantContext(VariantContext context) {
    final Set<Sample> result = new HashSet<Sample>();
    for (final String sampleName : context.getSampleNames()) {
        result.add(sampleDataSource.getOrCreateSample(sampleName));
    }
    return result;
}
/**
 * Returns all samples that were referenced in the SAM file.
 *
 * @return samples referenced by the SAM file, as reported by the sample data source
 */
public Set<Sample> getSAMFileSamples() {
    return sampleDataSource.getSAMFileSamples();
}

/**
 * Return a subcontext restricted to samples with a given property key/value.
 * Gets the sample names from key/value and relies on VariantContext.subContextFromGenotypes for the filtering.
 *
 * @param context VariantContext to filter
 * @param key property key
 * @param value property value (must be string)
 * @return subcontext
 */
public VariantContext subContextFromSampleProperty(VariantContext context, String key, String value) {
    return sampleDataSource.subContextFromSampleProperty(context, key, value);
}

/**
 * Reconstructs an approximation of the command-line arguments for this run
 * from the given argument providers, keyed by argument name.
 *
 * @param argumentProviders objects whose annotated fields supply argument values
 * @return map from argument name to its (approximate) string value
 */
public Map<String,String> getApproximateCommandLineArguments(Object... argumentProviders) {
    return CommandLineUtils.getApproximateCommandLineArguments(parsingEngine,argumentProviders);
}

/**
 * Builds a single string approximating the command line used for this run.
 *
 * @param argumentProviders objects whose annotated fields supply argument values
 * @return approximate command-line string
 */
public String createApproximateCommandLineArgumentString(Object... argumentProviders) {
    return CommandLineUtils.createApproximateCommandLineArgumentString(parsingEngine,argumentProviders);
}
}
|
package org.broadinstitute.sting.utils;
import net.sf.samtools.CigarElement;
import net.sf.samtools.CigarOperator;
import net.sf.samtools.Cigar;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Arrays;
/**
 * Smith-Waterman local pairwise alignment of two byte sequences under an
 * affine gap penalty w(k) = w_open + (k-1)*w_extend.
 *
 * <p>The (n+1) x (m+1) score matrix is kept in a single linearized array.
 * The backtrack matrix stores 0 for a diagonal step, a positive k for a
 * vertical (deletion) step of length k, and a negative k for a horizontal
 * (insertion) step of length k.
 *
 * <p>NOTE(review): the constructor invokes the public method {@link #align};
 * subclasses must not override align(), since it runs before any subclass
 * constructor body.
 */
public class SWPairwiseAlignment {

    private int alignment_offset; // offset of s2 w/respect to s1
    private Cigar alignmentCigar; // alignment of seq2 against seq1, built by calculateCigar()

    // Scoring parameters, fixed at construction time.
    private final double w_match;
    private final double w_mismatch;
    private final double w_open;
    private final double w_extend;

    // Backtracking states, named with respect to sequence 2:
    private static final int MSTATE = 0; // (mis)match
    private static final int ISTATE = 1; // insertion
    private static final int DSTATE = 2; // deletion

    /**
     * Aligns seq2 against seq1 with the given scoring parameters.
     *
     * @param seq1     reference-like sequence
     * @param seq2     sequence to align against seq1
     * @param match    score for a matching base (should be positive)
     * @param mismatch score for a mismatching base (should be negative)
     * @param open     gap-open penalty (should be negative)
     * @param extend   gap-extension penalty (should be negative)
     */
    public SWPairwiseAlignment(byte[] seq1, byte[] seq2, double match, double mismatch, double open, double extend) {
        w_match = match;
        w_mismatch = mismatch;
        w_open = open;
        w_extend = extend;
        align(seq1, seq2);
    }

    /**
     * Aligns seq2 against seq1 with default scores:
     * match=1, mismatch=-1/3, gap open=-(1+1/3), gap extend=-1/3.
     */
    public SWPairwiseAlignment(byte[] seq1, byte[] seq2) {
        this(seq1, seq2, 1.0, -1.0/3.0, -1.0-1.0/3.0, -1.0/3.0);
    }

    /** @return the cigar describing seq2's alignment against seq1 */
    public Cigar getCigar() { return alignmentCigar; }

    /** @return start offset of seq2's alignment with respect to seq1 */
    public int getAlignmentStart2wrt1() { return alignment_offset; }

    /**
     * Runs the full alignment: fills the score/backtrack matrices, then
     * backtracks to produce the cigar and alignment offset.
     *
     * @param a first sequence
     * @param b second sequence
     */
    public void align(final byte[] a, final byte[] b) {
        final int n = a.length;
        final int m = b.length;
        double [] sw = new double[(n+1)*(m+1)];
        int [] btrack = new int[(n+1)*(m+1)];
        calculateMatrix(a, b, sw, btrack);
        calculateCigar(n, m, sw, btrack);
    }

    /**
     * Builds the Smith-Waterman score matrix and backtrack info.
     *
     * <p>Instead of scanning the whole row/column above/left of each cell for
     * the best gap, the best vertical and horizontal gap scores (and their
     * lengths) are maintained incrementally. IMPORTANT: this optimization is
     * valid ONLY for a gap penalty linear in k: w(k)=w_open+(k-1)*w_extend.
     */
    private void calculateMatrix(final byte[] a, final byte[] b, double [] sw, int [] btrack ) {
        final int n = a.length+1;
        final int m = b.length+1;

        // Best step-down (vertical) gap ending in each column, and its length.
        double [] best_gap_v = new double[m+1];
        Arrays.fill(best_gap_v,-1.0e40);
        int [] gap_size_v = new int[m+1];
        // Best step-right (horizontal) gap ending in each row, and its length.
        double [] best_gap_h = new double[n+1];
        Arrays.fill(best_gap_h,-1.0e40);
        int [] gap_size_h = new int[n+1];

        // build smith-waterman matrix and keep backtrack info:
        for ( int i = 1, row_offset_1 = 0 ; i < n ; i++ ) { // row_offset_1 is deliberately updated at the END of the loop body, not here
            byte a_base = a[i-1]; // letter in a at the current pos
            final int row_offset = row_offset_1 + m;
            // On entrance into the loop, row_offset_1 is the (linear) offset
            // of the first element of row (i-1) and row_offset is the linear
            // offset of the start of row i.
            for ( int j = 1, data_offset_1 = row_offset_1 ; j < m ; j++, data_offset_1++ ) {
                // data_offset_1 is linearized offset of element [i-1][j-1]
                final byte b_base = b[j-1]; // letter in b at the current pos
                // in other words, step_diag = sw[i-1][j-1] + wd(a_base,b_base);
                double step_diag = sw[data_offset_1] + wd(a_base,b_base);

                // Vertical gaps: cost of opening a length-1 gap just above the current cell...
                double prev_gap = sw[data_offset_1+1]+w_open;
                // ...vs. extending the best previously opened gap by one base:
                best_gap_v[j] += w_extend;
                if ( prev_gap > best_gap_v[j] ) {
                    // The freshly opened gap beats any extension of an older gap.
                    // Since extending always costs w_extend, it stays better for
                    // all cells below as well.
                    best_gap_v[j] = prev_gap;
                    gap_size_v[j] = 1; // the best step-down gap from above has length 1 (just opened)
                } else {
                    // The previous best gap is still best, even extended by one base:
                    gap_size_v[j]++;
                }
                final double step_down = best_gap_v[j] ;
                final int kd = gap_size_v[j];

                // Horizontal gaps: identical incremental trick, per row.
                final int data_offset = row_offset + j; // linearized offset of element [i][j]
                prev_gap = sw[data_offset-1]+w_open; // cost of opening a length-1 gap just left of the current cell
                best_gap_h[i] += w_extend; // cost of extending the previous best gap by one base
                if ( prev_gap > best_gap_h[i] ) {
                    best_gap_h[i] = prev_gap;
                    gap_size_h[i] = 1;
                } else {
                    gap_size_h[i]++;
                }
                final double step_right = best_gap_h[i];
                final int ki = gap_size_h[i];

                // Pick the best of diagonal / down / right; scores are clamped
                // at 0 (local alignment). Backtrack encoding: 0 = diagonal,
                // positive = vertical gap length, negative = horizontal gap length.
                if ( step_down > step_right ) {
                    if ( step_down > step_diag ) {
                        sw[data_offset] = Math.max(0,step_down);
                        btrack[data_offset] = kd ; // positive=vertical
                    } else {
                        sw[data_offset] = Math.max(0,step_diag);
                        btrack[data_offset] = 0; // 0 = diagonal
                    }
                } else {
                    // step_down <= step_right
                    if ( step_right > step_diag ) {
                        sw[data_offset] = Math.max(0,step_right);
                        btrack[data_offset] = -ki; // negative = horizontal
                    } else {
                        sw[data_offset] = Math.max(0,step_diag);
                        btrack[data_offset] = 0; // 0 = diagonal
                    }
                }
            }
            // IMPORTANT: the secondary outer loop variable is updated here so
            // the for() statement itself does not need to touch it.
            row_offset_1 = row_offset;
        }
    }

    /**
     * Backtracks through the filled matrices to assemble the cigar (in
     * reverse) and compute the alignment offset of seq2 w.r.t. seq1.
     */
    private void calculateCigar(int n, int m, double [] sw, int [] btrack) {
        // p1/p2 hold the position we start backtracking from.
        int p1 = 0, p2 = 0;
        double maxscore = 0.0;
        int segment_length = 0; // length of the current segment (continuous matches, insertions or deletions)

        // Look for the largest score on the last column/row. ">=" combined
        // with the traversal direction ensures that if two scores are equal,
        // the one closer to the diagonal gets picked.
        for ( int i = 1, data_offset = m+1+m ; i < n+1 ; i++, data_offset += (m+1) ) {
            // data_offset is the offset of [i][m]
            if ( sw[data_offset] >= maxscore ) {
                p1 = i; p2 = m ; maxscore = sw[data_offset];
            }
        }
        for ( int j = 1, data_offset = n*(m+1)+1 ; j < m+1 ; j++, data_offset++ ) {
            // data_offset is the offset of [n][j]
            if ( sw[data_offset] > maxscore || sw[data_offset] == maxscore && Math.abs(n-j) < Math.abs(p1 - p2)) {
                p1 = n;
                p2 = j ;
                maxscore = sw[data_offset];
                segment_length = m - j ; // end of sequence 2 is overhanging; we will just record it as 'M' segment
            }
        }

        // All insertions and deletions are placed into sequence b, so the
        // states are named with regard to that sequence.
        int state = MSTATE;
        List<CigarElement> lce = new ArrayList<CigarElement>(5);
        int data_offset = p1*(m+1)+p2; // offset of element [p1][p2]
        do {
            int btr = btrack[data_offset];
            int step_left = ( btr < 0 ? -btr : 1);
            int step_up = ( btr > 0 ? btr : 1 );
            int new_state;
            if ( btr > 0 ) new_state = DSTATE;
            else if ( btr < 0 ) new_state = ISTATE;
            else new_state = MSTATE;
            int step_length = 1;
            // move to next best location in the sw matrix:
            switch( new_state ) {
                case MSTATE: data_offset -= (m+2); break; // equivalent to p1--; p2--
                case ISTATE: data_offset -= step_left; step_length = step_left; break; // equivalent to p2-=step_left;
                case DSTATE: data_offset -= (m+1)*step_up; step_length = step_up; break; // equivalent to p1 -= step_up
            }
            // now let's see if the state actually changed:
            if ( new_state == state ) segment_length+=step_length;
            else {
                // state changed, lets emit previous segment, whatever it was (Insertion Deletion, or (Mis)Match).
                lce.add(makeElement(state, segment_length));
                segment_length = step_length;
                state = new_state;
            }
            // next condition is equivalent to while ( sw[p1][p2] != 0 ) (with modified p1 and/or p2):
        } while ( sw[data_offset] != 0 );
        // reinstate last values of p1, p2 we arrived to after matrix traversal:
        p1 = data_offset / (m+1);
        p2 = data_offset % (m+1);
        // post-process the last segment we are still keeping; a nonzero p2
        // means a leading overhang of sequence 2, folded into this segment.
        lce.add(makeElement(state, segment_length + p2));
        alignment_offset = p1 - p2;
        Collections.reverse(lce);
        alignmentCigar = new Cigar(lce);
    }

    /**
     * Maps a backtracking state to a cigar element of the given length.
     *
     * @throws IllegalStateException on an unknown state (previously this
     *         returned an element with a null operator, deferring the failure
     *         to an NPE at an unrelated call site)
     */
    private CigarElement makeElement(int state, int segment_length) {
        final CigarOperator o;
        switch(state) {
            case MSTATE: o = CigarOperator.M; break;
            case ISTATE: o = CigarOperator.I; break;
            case DSTATE: o = CigarOperator.D; break;
            default: throw new IllegalStateException("Unknown alignment state: " + state);
        }
        return new CigarElement(segment_length,o);
    }

    /** Per-base score: w_match on a match, w_mismatch otherwise. */
    private double wd(byte x, byte y) {
        return (x == y ? w_match : w_mismatch);
    }

    /**
     * Affine gap penalty for a gap of length k. Reference implementation;
     * calculateMatrix() computes the same quantity incrementally.
     */
    private double wk(int k) {
        return w_open+(k-1)*w_extend; // gap
    }

    // ------------------------------------------------------------------
    // Debug helpers: pretty-print score/backtrack matrices. Not used in
    // production code paths; kept for troubleshooting.
    // ------------------------------------------------------------------

    private void print(int[][] s) {
        for ( int i = 0 ; i < s.length ; i++) {
            for ( int j = 0; j < s[i].length ; j++ ) {
                System.out.printf(" %4d",s[i][j]);
            }
            System.out.println();
        }
    }

    private void print(double[][] s) {
        for ( int i = 0 ; i < s.length ; i++) {
            for ( int j = 0; j < s[i].length ; j++ ) {
                System.out.printf(" %4g",s[i][j]);
            }
            System.out.println();
        }
    }

    private void print(int[][] s, String a, String b) {
        System.out.print("   ");
        for ( int j = 1 ; j < s[0].length ; j++) System.out.printf(" %4c",b.charAt(j-1)) ;
        System.out.println();
        for ( int i = 0 ; i < s.length ; i++) {
            if ( i > 0 ) System.out.print(a.charAt(i-1));
            else System.out.print(' ');
            System.out.print("  ");
            for ( int j = 0; j < s[i].length ; j++ ) {
                System.out.printf(" %4d",s[i][j]);
            }
            System.out.println();
        }
    }

    private void print(double[][] s, String a, String b) {
        // BUG FIX: the header row used to be printed with no leading padding
        // (print("")), misaligning it relative to the matrix rows below.
        System.out.print("   ");
        for ( int j = 1 ; j < s[0].length ; j++) System.out.printf(" %4c",b.charAt(j-1)) ;
        System.out.println();
        for ( int i = 0 ; i < s.length ; i++) {
            if ( i > 0 ) System.out.print(a.charAt(i-1));
            else System.out.print(' ');
            System.out.print("  ");
            for ( int j = 0; j < s[i].length ; j++ ) {
                System.out.printf(" %2.1f",s[i][j]);
            }
            System.out.println();
        }
    }
}
|
package com.andrew.Service;
import com.alibaba.druid.util.StringUtils;
import com.andrew.Common.ResultType;
import com.andrew.Model.ParamRuleModel;
import com.googlecode.aviator.AviatorEvaluator;
import com.googlecode.aviator.Expression;
import com.googlecode.aviator.exception.ExpressionRuntimeException;
import com.googlecode.aviator.exception.ExpressionSyntaxErrorException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
public class RuleCheckService {

    // Parameter-check rule definitions, keyed by rule type.
    private static final String SELECT_RULES_BY_TYPE = "SELECT ID,TYPE, NAME, RULE, RESULT_TYPE, REQUIRED, FIX_RULE FROM CMDB.PARAM_RULES WHERE TYPE=?";
    // DSM snapshots of DB2 database / database-manager / registry configuration.
    private static final String SELECT_DSM_DBCFG = "SELECT NAME,VALUE,VALUE_FLAGS FROM IBMIOCM.DB2LUW_DBCFG WHERE IBMIOCM_DATABASE=?";
    private static final String SELECT_DSM_DBMCFG = "SELECT NAME,VALUE,VALUE_FLAGS FROM IBMIOCM.DB2LUW_DBMCFG WHERE IBMIOCM_DATABASE=?";
    private static final String SELECT_DSM_DB2SET = "SELECT REG_VAR_NAME AS NAME,REG_VAR_VALUE AS VALUE FROM IBMIOCM.DB2LUW_REG_VARIABLES WHERE IBMIOCM_DATABASE=?";

    @Autowired
    private JdbcTemplate jdbcTemplate;

    /**
     * Loads all parameter-check rules of the given type from CMDB.PARAM_RULES.
     *
     * @param type rule type (e.g. "DBCFG", "DBMCFG", "DB2SET")
     * @return rule models; resultType is left null for an unrecognized RESULT_TYPE value
     */
    public List<ParamRuleModel> getRulesByType(String type) {
        RowMapper<ParamRuleModel> paramRuleModelRowMapper = (resultSet, i) -> {
            ParamRuleModel paramRuleModel = new ParamRuleModel();
            paramRuleModel.setId(resultSet.getInt("ID"));
            paramRuleModel.setName(resultSet.getString("NAME"));
            paramRuleModel.setType(resultSet.getString("TYPE"));
            paramRuleModel.setRule(resultSet.getString("RULE"));
            switch (resultSet.getString("RESULT_TYPE")) {
                case "INT": paramRuleModel.setResultType(ResultType.INT); break;
                case "STRING": paramRuleModel.setResultType(ResultType.STRING); break;
                case "BOOL": paramRuleModel.setResultType(ResultType.BOOL); break;
                // any other RESULT_TYPE falls through with resultType == null
            }
            paramRuleModel.setRequired(resultSet.getBoolean("REQUIRED"));
            paramRuleModel.setFixRule(resultSet.getString("FIX_RULE"));
            return paramRuleModel;
        };
        return jdbcTemplate.query(SELECT_RULES_BY_TYPE, paramRuleModelRowMapper, type);
    }

    /**
     * Builds an Aviator evaluation environment from parameter rows only.
     *
     * @param params rows with NAME/VALUE (and optionally VALUE_FLAGS) keys
     * @return environment map for expression evaluation
     */
    public Map<String, Object> buildEnv(List<Map<String, Object>> params) {
        return this.buildEnv(null, params);
    }

    /**
     * Builds an Aviator evaluation environment: each parameter row NAME/VALUE
     * becomes a "$NAME_VALUE" entry (numeric-looking values are parsed), plus
     * an optional "$NAME_VALUE_FLAGS" entry.
     *
     * @param extraEnv extra variables to seed the environment with (may be null);
     *                 the map is copied, not mutated
     * @param params   rows with NAME/VALUE (and optionally VALUE_FLAGS) keys
     * @return environment map for expression evaluation
     */
    public Map<String, Object> buildEnv(Map<String, Object> extraEnv, List<Map<String, Object>> params) {
        Map<String, Object> env = new HashMap<>();
        if (extraEnv != null) {
            // Copy rather than alias so the caller's map is never mutated.
            env.putAll(extraEnv);
        }
        for (Map<String, Object> param : params) {
            String value = param.get("VALUE") == null ? "nil" : param.get("VALUE").toString();
            // Aviator variable names cannot contain dots, so normalize NAME in place.
            param.replace("NAME", param.get("NAME").toString().replace(".", "_").trim());
            String name = param.get("NAME").toString();
            Object envValue = value;
            // Hex values and scientific notation are deliberately kept as strings.
            if (StringUtils.isNumber(value) && !value.startsWith("0x") && !value.contains("e")) {
                try {
                    // BUG FIX: this used to test value.contains("\\.") — a literal
                    // backslash-dot that never matches — so decimal values like
                    // "3.5" were sent to Long.valueOf and crashed with a
                    // NumberFormatException.
                    if (value.contains("."))
                        envValue = Double.parseDouble(value);
                    else
                        envValue = Long.valueOf(value);
                } catch (NumberFormatException ignored) {
                    // Looked numeric but does not parse (e.g. out of range):
                    // fall back to the raw string value.
                    envValue = value;
                }
            }
            env.put(String.format("$%s_VALUE", name), envValue);
            if (param.containsKey("VALUE_FLAGS")) {
                env.put(String.format("$%s_VALUE_FLAGS", name), param.get("VALUE_FLAGS"));
            }
        }
        return env;
    }

    /**
     * Evaluates every rule expression against the environment, recording the
     * variables consumed, the evaluation result (or the error message on
     * failure), and the optional fix-rule result.
     */
    private List<ParamRuleModel> execRules(List<ParamRuleModel> rules, Map<String, Object> env) {
        for (ParamRuleModel rule : rules) {
            try {
                Expression expression = AviatorEvaluator.compile(rule.getRule());
                List<String> variables = expression.getVariableFullNames();
                for (String var : variables) {
                    if (env.containsKey(var)) {
                        // Guard against a null previous params value (would have
                        // produced a literal "null" prefix) and a null env value
                        // (would have thrown an NPE on toString()).
                        String prefix = rule.getParams() == null ? "" : rule.getParams();
                        rule.setParams(prefix + String.format("%s=%s;", var, env.get(var)));
                    } else {
                        rule.setEvalResult(String.format("Can Not Find Var:%s", var));
                    }
                }
                Object result = expression.execute(env);
                rule.setEvalResult(result);
            } catch (ExpressionRuntimeException runException) {
                rule.setEvalResult(runException.getMessage());
            } catch (ExpressionSyntaxErrorException synException) {
                rule.setEvalResult(synException.getMessage());
            }
            if (rule.getFixRule() != null && !StringUtils.isEmpty(rule.getFixRule()))
                rule.setFixResult((String) AviatorEvaluator.execute(rule.getFixRule(), env));
        }
        return rules;
    }

    /**
     * Check DBCfg Rules.
     *
     * @param dbname DSM database identifier
     * @return evaluated rules for the database configuration
     */
    public List<ParamRuleModel> getDBCFGChecked(String dbname) {
        List<Map<String, Object>> params = this.jdbcTemplate.queryForList(SELECT_DSM_DBCFG, dbname);
        Map<String, Object> env = new HashMap<>();
        env.put("$DB", dbname);
        return this.getObjectChecked("DBCFG", params, env);
    }

    /**
     * Check DBM Cfg Rules.
     *
     * @param dbname DSM database identifier
     * @return evaluated rules for the database-manager configuration
     */
    public List<ParamRuleModel> getDBMCFGChecked(String dbname) {
        List<Map<String, Object>> params = this.jdbcTemplate.queryForList(SELECT_DSM_DBMCFG, dbname);
        Map<String, Object> env = new HashMap<>();
        env.put("$DB", dbname);
        return this.getObjectChecked("DBMCFG", params, env);
    }

    /**
     * Check DB2Set Rules.
     *
     * @param dbname DSM database identifier
     * @return evaluated rules for the DB2 registry variables
     */
    public List<ParamRuleModel> getDB2SetChecked(String dbname) {
        List<Map<String, Object>> params = this.jdbcTemplate.queryForList(SELECT_DSM_DB2SET, dbname);
        Map<String, Object> env = new HashMap<>();
        env.put("$DB", dbname);
        return this.getObjectChecked("DB2SET", params, env);
    }

    /**
     * Check Object Rules From Outside.
     *
     * @param type    rule type to load
     * @param objects parameter rows to build the environment from
     * @return evaluated rules
     */
    public List<ParamRuleModel> getObjectChecked(String type, List<Map<String, Object>> objects) {
        return this.getObjectChecked(type, objects, null);
    }

    /**
     * Check Object Rules From Outside, with extra environment variables.
     *
     * @param type     rule type to load
     * @param objects  parameter rows to build the environment from
     * @param extraEnv extra environment entries (may be null)
     * @return evaluated rules
     */
    public List<ParamRuleModel> getObjectChecked(String type, List<Map<String, Object>> objects, Map<String, Object> extraEnv) {
        List<ParamRuleModel> rules = this.getRulesByType(type);
        Map<String, Object> env = this.buildEnv(extraEnv, objects);
        return this.execRules(rules, env);
    }
}
|
package com.jenjinstudios.core.io;
import com.jenjinstudios.core.util.TypeMapper;
import com.jenjinstudios.core.xml.ArgumentType;
import com.jenjinstudios.core.xml.MessageType;
import java.util.Map;
import java.util.TreeMap;
/**
 * The {@code Message} class is used in sending data to and receiving data from {@code Connection} objects. Each
 * Message has a unique {@code name}, a unique {@code id}, and a {@code Map} of arguments which are accessed with the
 * {@code getArgument} and {@code setArgument} methods. </p> Message arguments may consist of any primitive type, as well
 * as {@code String} objects, and {@code String} and {@code byte} arrays.
 * @author Caleb Brinkman
 */
public class Message
{
    /** The unique name of this message's type; public for direct field access by existing callers. */
    public final String name;
    private final MessageType messageType;
    /** Arguments keyed by argument name. */
    private final Map<String, Object> argumentsByName;
    private final short id;

    /**
     * Builds a message of the registered type {@code id}, assigning {@code args}
     * positionally to the type's declared arguments.
     *
     * @throws IllegalStateException if fewer args are supplied than the type declares
     */
    Message(MessageRegistry messageRegistry, short id, Object... args) {
        this.id = id;
        messageType = messageRegistry.getMessageType(id);
        name = messageType.getName();
        argumentsByName = new TreeMap<>();
        for (int i = 0; i < messageType.getArguments().size(); i++)
        {
            try
            {
                setArgument(messageType.getArguments().get(i).getName(), args[i]);
            } catch (ArrayIndexOutOfBoundsException ex)
            {
                throw new IllegalStateException(
                      "Not enough arguments provided for Message", ex);
            }
        }
    }

    /** Builds a message of the given type with no arguments set yet. */
    Message(MessageType messageType) {
        this.messageType = messageType;
        this.name = messageType.getName();
        id = messageType.getId();
        argumentsByName = new TreeMap<>();
    }

    /**
     * Sets a named argument after validating its name and runtime type against
     * the message type's declaration.
     *
     * @param argumentName name of a declared argument
     * @param argument     value; must be an instance of the declared type
     * @throws IllegalArgumentException if the name is undeclared or the value's
     *         type does not match
     */
    public void setArgument(String argumentName, Object argument) {
        ArgumentType argType = null;
        for (ArgumentType a : messageType.getArguments())
        {
            if (argumentName.equals(a.getName()))
            {
                argType = a;
                break; // declaration found; no need to scan the remaining arguments
            }
        }
        if (argType == null)
            throw new IllegalArgumentException("Invalid argument name for Message: " + argumentName +
                  " (Message type: " + messageType.getName() + ")");
        Class c = TypeMapper.getTypeForName(argType.getType());
        if (!c.isInstance(argument))
            // BUG FIX: argument.getClass() previously threw an NPE here when
            // argument was null; a null value now raises the intended
            // IllegalArgumentException with a readable message.
            throw new IllegalArgumentException("Invalid argument type for Message: " + argument +
                  " (Expected " + argType.getType() + ", got " +
                  (argument == null ? "null" : argument.getClass()) + ")");
        argumentsByName.put(argumentName, argument);
    }

    /** @return the value previously set for {@code argumentName}, or null if unset */
    public Object getArgument(String argumentName) {
        return argumentsByName.get(argumentName);
    }

    /** @return the unique id of this message's type */
    public short getID() {
        return id;
    }

    /**
     * Returns all argument values in the order the message type declares them.
     *
     * @throws IllegalStateException if not every declared argument has been set
     */
    public final Object[] getArgs() {
        if (isInvalid())
            throw new IllegalStateException("Attempting to retrieve arguments while message is invalid. (Not all " +
                  "arguments have been set.)");
        Object[] argsArray = new Object[messageType.getArguments().size()];
        for (int i = 0; i < messageType.getArguments().size(); i++)
        {
            argsArray[i] = argumentsByName.get(messageType.getArguments().get(i).getName());
        }
        return argsArray;
    }

    /** @return true while at least one declared argument is still unset */
    boolean isInvalid() {
        return argumentsByName.size() != messageType.getArguments().size();
    }

    @Override
    public String toString() { return "Message " + id + " " + name; }
}
|
package com.jme3.gde.core.scene.state;
import com.jme3.app.Application;
import com.jme3.app.state.BaseAppState;
import com.jme3.gde.core.scene.SceneApplication;
import com.jme3.material.Material;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.scene.VertexBuffer.Type;
import com.jme3.util.TangentBinormalGenerator;
import java.util.ArrayList;
import java.util.List;
import jme3tools.optimize.GeometryBatchFactory;
/**
 * App state that visualizes vertex normals: for every geometry under the
 * scene root that has a normal buffer, a line-mesh debug geometry is
 * generated and kept in sync with the original's world transform.
 */
public final class NormalViewState extends BaseAppState {

    /** Parent node holding all generated normal-line geometries. */
    private final Node debugNode = new Node("Normals debug");
    /** Length of each rendered normal line. */
    private final float normalLength = 0.2f;

    @Override
    protected void initialize(final Application app) {
        // not used
    }

    @Override
    protected void cleanup(final Application app) {
        // not used
    }

    /**
     * Generates a normal-line debug geometry (named after its source) for
     * every geometry with a normal buffer and attaches them under a single
     * debug node on the scene root.
     */
    @Override
    protected void onEnable() {
        final SceneApplication app = (SceneApplication) getApplication();
        final List<Geometry> geometries = new ArrayList<>();
        GeometryBatchFactory.gatherGeoms(app.getRootNode(), geometries);
        final Material debugMat = app.getAssetManager().loadMaterial(
                "Common/Materials/VertexColor.j3m");
        for (final Geometry geometry : geometries) {
            // No normal buffer -> nothing to visualize for this geometry.
            if (geometry.getMesh().getBuffer(Type.Normal) == null) {
                continue;
            }
            final Geometry debug = new Geometry(geometry.getName(),
                    TangentBinormalGenerator.genNormalLines(geometry.getMesh(),
                            normalLength));
            debug.setMaterial(debugMat);
            debug.setCullHint(Spatial.CullHint.Never);
            debug.setLocalTransform(geometry.getWorldTransform());
            debugNode.attachChild(debug);
        }
        app.getRootNode().attachChild(debugNode);
    }

    /**
     * Per-frame sync: follows each original's world transform, and removes
     * debug geometries whose originals are gone from the scene.
     */
    @Override
    public void update(float tpf) {
        super.update(tpf);
        final Node rootNode = ((SceneApplication) getApplication()).getRootNode();
        // BUG FIX: iterate over a snapshot of the children. removeFromParent()
        // below mutates debugNode's child list, which previously happened
        // while iterating the live list returned by getChildren().
        for (final Spatial debug : new ArrayList<>(debugNode.getChildren())) {
            // Debug geometries share their original's name (see onEnable);
            // a missing original means it left the scene.
            final Spatial original = rootNode.getChild(debug.getName());
            if (original == null) {
                debug.removeFromParent();
                continue;
            }
            debug.setLocalTransform(original.getWorldTransform());
        }
    }

    @Override
    protected void onDisable() {
        debugNode.removeFromParent();
        debugNode.detachAllChildren();
    }
}
|
package com.jme.input.lwjgl;
import java.util.logging.Level;
import org.lwjgl.input.Mouse;
import com.jme.input.MouseButtonStateType;
import com.jme.input.MouseInput;
import com.jme.util.LoggingSystem;
import com.jme.widget.impl.lwjgl.WidgetLWJGLStandardCursor;
/**
* <code>LWJGLMouseInput</code> handles mouse input via the LWJGL Input API.
*
* @author Mark Powell
* @version $Id: LWJGLMouseInput.java,v 1.7 2004-11-16 20:45:41 renanse Exp $
*/
public class LWJGLMouseInput implements MouseInput {

    // Aggregate button state for the current and the previous updateState() call.
    private MouseButtonStateType buttonType = MouseButtonStateType.MOUSE_BUTTON_NONE;
    private MouseButtonStateType previousButtonType = MouseButtonStateType.MOUSE_BUTTON_NONE;
    // Movement deltas captured by the most recent poll().
    private int dx, dy;
    // True until the first nonzero movement is seen; that first movement is
    // discarded (likely to swallow the large initial delta — see poll()).
    private boolean virgin = true;

    /**
     * Constructor creates a new <code>LWJGLMouseInput</code> object. A call
     * to the LWJGL creation method is made, if any problems occur during
     * this creation, it is logged. The cursor starts hidden (and therefore
     * grabbed, see {@link #setCursorVisible(boolean)}).
     */
    public LWJGLMouseInput() {
        try {
            Mouse.create();
            setCursorVisible(false);
        } catch (Exception e) {
            LoggingSystem.getLogger().log(Level.WARNING, "Problem during " + "creation of Mouse.");
        }
    }

    /**
     * <code>destroy</code> cleans up the native mouse reference, hiding the
     * cursor before tearing the device down.
     * @see com.jme.input.MouseInput#destroy()
     */
    public void destroy() {
        setCursorVisible(false);
        Mouse.destroy();
    }

    /**
     * <code>getButtonIndex</code> returns the index of a given button name.
     * @see com.jme.input.MouseInput#getButtonIndex(java.lang.String)
     */
    public int getButtonIndex(String buttonName) {
        return Mouse.getButtonIndex(buttonName);
    }

    /**
     * <code>getButtonName</code> returns the name of a given button index.
     * @see com.jme.input.MouseInput#getButtonName(int)
     */
    public String getButtonName(int buttonIndex) {
        return Mouse.getButtonName(buttonIndex);
    }

    /**
     * <code>isButtonDown</code> tests if a given button is pressed or not.
     * @see com.jme.input.MouseInput#isButtonDown(int)
     */
    public boolean isButtonDown(int buttonCode) {
        return Mouse.isButtonDown(buttonCode);
    }

    /**
     * <code>isCreated</code> returns true if the mouse is created, false
     * otherwise.
     * @see com.jme.input.MouseInput#isCreated()
     */
    public boolean isCreated() {
        return Mouse.isCreated();
    }

    /**
     * <code>poll</code> gets the current state of the mouse, caching the
     * movement deltas. The very first nonzero movement after creation is
     * zeroed out once.
     * @see com.jme.input.MouseInput#poll()
     */
    public void poll() {
        dx = Mouse.getDX();
        dy = Mouse.getDY();
        if (virgin && (dx != 0 || dy != 0)) {
            // Discard the first reported movement (one time only).
            dx = dy = 0;
            virgin = false;
        }
    }

    /**
     * <code>getWheelDelta</code> retrieves the change of the mouse wheel,
     * if any.
     * @see com.jme.input.MouseInput#getWheelDelta()
     */
    public int getWheelDelta() {
        return Mouse.getDWheel();
    }

    /**
     * <code>getXDelta</code> retrieves the change of the x position, if any,
     * as cached by the last {@link #poll()}.
     * @see com.jme.input.MouseInput#getXDelta()
     */
    public int getXDelta() {
        return dx;
    }

    /**
     * <code>getYDelta</code> retrieves the change of the y position, if any,
     * as cached by the last {@link #poll()}.
     * @see com.jme.input.MouseInput#getYDelta()
     */
    public int getYDelta() {
        return dy;
    }

    /**
     * <code>getXAbsolute</code> gets the absolute x axis value.
     * @see com.jme.input.MouseInput#getXAbsolute()
     */
    public int getXAbsolute() {
        return Mouse.getX();
    }

    /**
     * <code>getYAbsolute</code> gets the absolute y axis value.
     * @see com.jme.input.MouseInput#getYAbsolute()
     */
    public int getYAbsolute() {
        return Mouse.getY();
    }

    /**
     * <code>updateState</code> updates the mouse state: polls movement deltas
     * and recomputes the aggregate button state.
     * @see com.jme.input.MouseInput#updateState()
     */
    public void updateState() {
        poll();
        setButtonStateType();
    }

    /**
     * Folds the pressed/released state of the first three buttons into a
     * single {@link MouseButtonStateType}, remembering the previous value for
     * {@link #getPreviousButtonState()}.
     */
    private void setButtonStateType() {
        int button = 0;
        previousButtonType = buttonType;
        // Collect buttons 0..2 into a bit mask; the BUTTON_* flags are
        // inherited constants (presumably from MouseInput — confirm there).
        for (int i = 0; i < Mouse.getButtonCount(); i++) {
            if (Mouse.isButtonDown(i)) {
                switch (i) {
                    case 0 :
                        button |= BUTTON_1;
                        break;
                    case 1 :
                        button |= BUTTON_2;
                        break;
                    case 2 :
                        button |= BUTTON_3;
                        break;
                }
            }
        }
        // Translate the mask into the corresponding enum-like state constant.
        switch (button) {
            case 0 :
                buttonType = MouseButtonStateType.MOUSE_BUTTON_NONE;
                break;
            case BUTTON_1 :
                buttonType = MouseButtonStateType.MOUSE_BUTTON_1;
                break;
            case BUTTON_2 :
                buttonType = MouseButtonStateType.MOUSE_BUTTON_2;
                break;
            case BUTTON_3 :
                buttonType = MouseButtonStateType.MOUSE_BUTTON_3;
                break;
            case BUTTON_1_2 :
                buttonType = MouseButtonStateType.MOUSE_BUTTON_1_2;
                break;
            case BUTTON_1_3 :
                buttonType = MouseButtonStateType.MOUSE_BUTTON_1_3;
                break;
            case BUTTON_2_3 :
                buttonType = MouseButtonStateType.MOUSE_BUTTON_2_3;
                break;
            case BUTTON_1_2_3 :
                buttonType = MouseButtonStateType.MOUSE_BUTTON_1_2_3;
                break;
        }
    }

    /**
     * <code>setCursorVisible</code> sets the visiblity of the hardware cursor.
     * Hiding the cursor also grabs the mouse; showing it releases the grab and
     * restores the standard widget cursor.
     * @see com.jme.input.MouseInput#setCursorVisible(boolean)
     */
    public void setCursorVisible(boolean v) {
        Mouse.setGrabbed(!v);
        try {
            if (v) {
                Mouse.setNativeCursor(WidgetLWJGLStandardCursor.cursor);
            } else {
                Mouse.setNativeCursor(null);
            }
        } catch (Exception e) {
            LoggingSystem.getLogger().log(Level.WARNING, "Problem showing mouse cursor.");
        }
    }

    /**
     * <code>isCursorVisible</code> Returns true if a cursor is currently bound.
     * @see com.jme.input.MouseInput#isCursorVisible()
     */
    public boolean isCursorVisible() {
        return Mouse.getNativeCursor() != null;
    }

    /**
     * @return The current state of the mouse's buttons.
     */
    public MouseButtonStateType getButtonType() {
        return buttonType;
    }

    /**
     * @return the state of the mouse buttons (same value as {@link #getButtonType()}).
     * @see com.jme.input.MouseInput#getButtonState()
     */
    public MouseButtonStateType getButtonState() {
        return buttonType;
    }

    /**
     * @return the previous state of the mouse buttons.
     * @see com.jme.input.MouseInput#getPreviousButtonState()
     */
    public MouseButtonStateType getPreviousButtonState() {
        return previousButtonType;
    }
}
|
package com.timepath.swing;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.DropMode;
import javax.swing.JComponent;
import javax.swing.JTree;
import javax.swing.TransferHandler;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
/**
 * A {@link JTree} whose nodes can be rearranged by drag and drop.
 * <p>
 * Dragging and dropping can be restricted to a band of tree depths via the
 * min/max drag and drop level properties (a value of -1 disables the bound).
 *
 * @author timepath
 */
public class ReorderableJTree extends JTree {

    private static final Logger LOG = Logger.getLogger(ReorderableJTree.class.getName());

    private static final long serialVersionUID = 1L;

    public ReorderableJTree() {
        // Enable DnD and allow dropping both onto nodes and between them.
        setDragEnabled(true);
        setDropMode(DropMode.ON_OR_INSERT);
        setTransferHandler(new TreeTransferHandler());
    }

    //<editor-fold defaultstate="collapsed" desc="Drag levels">
    // Minimum tree depth a drop target may have; -1 means unbounded.
    private int minDropLevel = -1;

    public int getMinDropLevel() {
        return minDropLevel;
    }

    public void setMinDropLevel(int minDropLevel) {
        this.minDropLevel = minDropLevel;
    }

    // Maximum tree depth a drop target may have; -1 means unbounded.
    private int maxDropLevel = -1;

    public int getMaxDropLevel() {
        return maxDropLevel;
    }

    /**
     * Sets the maximum dropping level
     * <p/>
     * @param maxDropLevel s
     */
    public void setMaxDropLevel(int maxDropLevel) {
        this.maxDropLevel = maxDropLevel;
    }

    // Minimum tree depth a dragged node may have; -1 means unbounded.
    private int minDragLevel = -1;

    public int getMinDragLevel() {
        return minDragLevel;
    }

    /**
     * Sets the minimum level of allowed movable nodes
     * <p/>
     * @param minDragLevel
     */
    public void setMinDragLevel(int minDragLevel) {
        this.minDragLevel = minDragLevel;
    }

    // Maximum tree depth a dragged node may have; -1 means unbounded.
    private int maxDragLevel = -1;

    public int getMaxDragLevel() {
        return maxDragLevel;
    }

    public void setMaxDragLevel(int maxDragLevel) {
        this.maxDragLevel = maxDragLevel;
    }
    //</editor-fold>

    /**
     * TransferHandler that moves/copies {@link DefaultMutableTreeNode}s
     * within this tree using a JVM-local data flavor.
     */
    private class TreeTransferHandler extends TransferHandler {

        private static final long serialVersionUID = 1L;

        // Flavor describing an array of DefaultMutableTreeNode, local to this JVM.
        private DataFlavor nodesFlavor;

        private DataFlavor[] flavors;

        // Nodes to remove from the model after a successful MOVE, captured
        // in createTransferable and consumed in exportDone.
        private DefaultMutableTreeNode[] nodesToRemove;

        //<editor-fold defaultstate="collapsed" desc="Helpers">
        /**
         * Returns true unless the first selected row has children and not all
         * of those children are part of the selection.
         * NOTE(review): returns true when nothing is selected — confirm that
         * is the intended behavior for the MOVE guard in canImport.
         */
        private boolean haveCompleteNode(JTree tree) {
            int[] selRows = tree.getSelectionRows(); // XXX: bad
            if(selRows == null || selRows.length == 0) {
                return true;
            }
            TreePath path = tree.getPathForRow(selRows[0]);
            DefaultMutableTreeNode first = (DefaultMutableTreeNode) path.getLastPathComponent();
            int childCount = first.getChildCount();
            // first has children and no children are selected.
            if(childCount > 0 && selRows.length == 1) {
                return false;
            }
            // first may have children.
            for(int i = 1; i < selRows.length; i++) {
                path = tree.getPathForRow(selRows[i]);
                DefaultMutableTreeNode next = (DefaultMutableTreeNode) path.getLastPathComponent();
                if(first.isNodeChild(next)) {
                    // Found a child of first.
                    if(childCount > selRows.length - 1) {
                        // Not all children of first are selected.
                        return false;
                    }
                }
            }
            return true;
        }

        /**
         * Defensive copy used in createTransferable.
         * NOTE(review): wraps the original node as the copy's user object;
         * canImport later unwraps it with getUserObject() — keep in sync.
         */
        private DefaultMutableTreeNode copy(TreeNode node) {
            return new DefaultMutableTreeNode(node);
        }
        //</editor-fold>

        TreeTransferHandler() {
            try {
                String mimeType = DataFlavor.javaJVMLocalObjectMimeType
                                      + ";class=\""
                                      + javax.swing.tree.DefaultMutableTreeNode[].class.getName()
                                      + "\"";
                nodesFlavor = new DataFlavor(mimeType);
                flavors = new DataFlavor[] {nodesFlavor};
            } catch(ClassNotFoundException e) {
                LOG.log(Level.SEVERE, "ClassNotFound: {0}", e.getMessage());
            }
        }

        //<editor-fold defaultstate="collapsed" desc="Export">
        @Override
        public int getSourceActions(JComponent c) {
            return TransferHandler.COPY_OR_MOVE;
        }

        @Override
        protected Transferable createTransferable(JComponent c) {
            JTree tree = (JTree) c;
            TreePath[] paths = tree.getSelectionPaths();
            if(paths != null) {
                // Make up a node array of copies for transfer and
                // another for/of the nodes that will be removed in
                // exportDone after a successful drop.
                List<DefaultMutableTreeNode> copies = new ArrayList<DefaultMutableTreeNode>();
                List<DefaultMutableTreeNode> toRemove = new ArrayList<DefaultMutableTreeNode>();
                DefaultMutableTreeNode node = (DefaultMutableTreeNode) paths[0].getLastPathComponent();
                DefaultMutableTreeNode copy = copy(node);
                copies.add(copy);
                toRemove.add(node);
                for(int i = 1; i < paths.length; i++) {
                    DefaultMutableTreeNode next = (DefaultMutableTreeNode) paths[i].getLastPathComponent();
                    // Do not allow higher level nodes to be added to list.
                    if(next.getLevel() < node.getLevel()) {
                        break;
                    } else if(next.getLevel() > node.getLevel()) { // child node
                        copy.add(copy(next));
                        // node already contains child
                    } else { // sibling
                        copies.add(copy(next));
                        toRemove.add(next);
                    }
                }
                DefaultMutableTreeNode[] nodes = copies.toArray(
                        new DefaultMutableTreeNode[copies.size()]);
                nodesToRemove = toRemove.toArray(new DefaultMutableTreeNode[toRemove.size()]);
                return new NodesTransferable(nodes);
            }
            return null;
        }

        @Override
        protected void exportDone(JComponent source, Transferable data, int action) {
            if((action & MOVE) == MOVE) {
                JTree tree = (JTree) source;
                DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
                // Remove nodes saved in nodesToRemove in createTransferable.
                for(int i = 0; i < nodesToRemove.length; i++) {
                    model.removeNodeFromParent(nodesToRemove[i]);
                }
            }
        }
        //</editor-fold>

        //<editor-fold defaultstate="collapsed" desc="Import">
        @Override
        public boolean canImport(TransferHandler.TransferSupport support) {
            if(!support.isDataFlavorSupported(nodesFlavor)) {
                return false;
            }
            support.setShowDropLocation(true);
            // Get drop location info
            JTree tree = (JTree) support.getComponent();
            // DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
            JTree.DropLocation dl = (JTree.DropLocation) support.getDropLocation();
            // int childIndex = dl.getChildIndex();
            TreePath dest = dl.getPath();
            if(dest == null) {
                return false;
            }
            DefaultMutableTreeNode target = (DefaultMutableTreeNode) dest.getLastPathComponent();
            // Convert nodes to usable format
            DefaultMutableTreeNode[] clodedNodes;
            try {
                clodedNodes = (DefaultMutableTreeNode[]) support.getTransferable().getTransferData(
                        nodesFlavor);
            } catch(Exception ex) {
                return false;
            }
            // Unwrap the originals that copy() stored as user objects.
            DefaultMutableTreeNode[] nodes = new DefaultMutableTreeNode[clodedNodes.length];
            for(int i = 0; i < clodedNodes.length; i++) {
                nodes[i] = (DefaultMutableTreeNode) clodedNodes[i].getUserObject();
            }
            // Sanity check: enforce the configured drop-depth band.
            if((maxDropLevel > -1 && target.getLevel() > maxDropLevel) || (minDropLevel > -1 && target.getLevel() < minDropLevel)) {
                return false;
            }
            // Do not allow MOVE-action drops if a non-leaf node is
            // selected unless all of its children are also selected.
            // NOTE(review): the rejection below is intentionally disabled;
            // confirm whether partial-subtree moves should be allowed.
            if(support.getDropAction() == MOVE && !haveCompleteNode(tree)) {
                // return false;
            }
            for(int i = 0; i < nodes.length; i++) {
                // Enforce the configured drag-depth band.
                if((minDragLevel > -1 && nodes[i].getLevel() < minDragLevel) || (maxDragLevel > -1 && nodes[i].getLevel() > maxDragLevel)) {
                    return false;
                }
                // Do not allow a drop on the drag source selections
                if(nodes[i] == target) {
                    return false;
                }
                // Do not allow a drop on the drag source's descendants
                if(nodes[i].isNodeDescendant(target)) {
                    return false;
                }
            }
            return true;
        }

        @Override
        public boolean importData(TransferHandler.TransferSupport support) {
            if(!support.isDrop()) {
                return false; // Pasting.
            }
            if(!canImport(support)) {
                return false;
            }
            // Get drop location info.
            JTree tree = (JTree) support.getComponent();
            DefaultTreeModel model = (DefaultTreeModel) tree.getModel();
            JTree.DropLocation dl = (JTree.DropLocation) support.getDropLocation();
            int childIndex = dl.getChildIndex();
            TreePath dest = dl.getPath();
            // Extract transfer data.
            DefaultMutableTreeNode[] nodes = null;
            try {
                nodes = (DefaultMutableTreeNode[]) support.getTransferable().getTransferData(
                        nodesFlavor);
            } catch(UnsupportedFlavorException ufe) {
                LOG.log(Level.WARNING, "UnsupportedFlavor: {0}", ufe.getMessage());
            } catch(java.io.IOException ioe) {
                LOG.log(Level.WARNING, "I/O error: {0}", ioe.getMessage());
            }
            if(nodes == null) {
                return false;
            }
            // Do stuff with data.
            DefaultMutableTreeNode parent = (DefaultMutableTreeNode) dest.getLastPathComponent();
            // Configure for drop mode.
            int index = childIndex; // DropMode.INSERT
            if(childIndex == -1) { // DropMode.ON
                index = parent.getChildCount(); // End of list
            }
            // Add data to model.
            for(int i = 0; i < nodes.length; i++) {
                model.insertNodeInto(nodes[i], parent, index++);
            }
            return true;
        }
        //</editor-fold>

        @Override
        public String toString() {
            return getClass().getName();
        }

        /**
         * Transferable carrying an array of tree nodes under the JVM-local
         * nodesFlavor only.
         */
        private class NodesTransferable implements Transferable {

            private DefaultMutableTreeNode[] nodes;

            NodesTransferable(DefaultMutableTreeNode[] nodes) {
                this.nodes = nodes;
            }

            public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException {
                if(!isDataFlavorSupported(flavor)) {
                    throw new UnsupportedFlavorException(flavor);
                }
                return nodes;
            }

            public DataFlavor[] getTransferDataFlavors() {
                return flavors;
            }

            public boolean isDataFlavorSupported(DataFlavor flavor) {
                return nodesFlavor.equals(flavor);
            }
        }
    }
}
|
package com.zzg.demo.ui;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import android.content.Intent;
import android.os.Bundle;
import android.widget.Button;
import com.baidu.location.BDLocation;
import com.baidu.mapapi.map.BaiduMap.OnMarkerClickListener;
import com.baidu.mapapi.map.BitmapDescriptor;
import com.baidu.mapapi.map.BitmapDescriptorFactory;
import com.baidu.mapapi.map.InfoWindow;
import com.baidu.mapapi.map.InfoWindow.OnInfoWindowClickListener;
import com.baidu.mapapi.map.Marker;
import com.baidu.mapapi.map.MarkerOptions;
import com.baidu.mapapi.map.OverlayOptions;
import com.baidu.mapapi.model.LatLng;
import com.baidu.mapapi.utils.DistanceUtil;
import com.zzg.demo.MyApplication;
import com.zzg.demo.R;
import com.zzg.demo.base.BaseMapActivity;
import com.zzg.demo.bean.User;
/**
* @author acer Descrption:TODO WHAT 2015-4-14 2:18:11
*/
public class NearFriendsActivity extends BaseMapActivity {
private Marker mMarker;
List<User> users = MyApplication.getInstance().getUsers();
BitmapDescriptor bdA = BitmapDescriptorFactory
.fromResource(R.drawable.icon_gcoding);
private InfoWindow mInfoWindow;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mBaiduMap.setOnMarkerClickListener(new OnMarkerClickListener() {
public boolean onMarkerClick(Marker marker) {
User user = (User) markerMap.get(marker);
LatLng position = marker.getPosition();
Button button = new Button(getApplicationContext());
button.setText(user.getName());
OnInfoWindowClickListener listener = null;
listener = new OnInfoWindowClickListener() {
@Override
public void onInfoWindowClick() {
Intent intent = new Intent();
intent.putExtra("from", "other");
startActivity(intent);
}
};
mInfoWindow = new InfoWindow(BitmapDescriptorFactory
.fromView(button), position, -47, listener);
mBaiduMap.showInfoWindow(mInfoWindow);
return false;
}
});
}
Map markerMap = new HashMap();
BitmapDescriptor bitmap;
Marker[] markers = new Marker[users.size()];
OverlayOptions[] options = new OverlayOptions[users.size()];
private List<Entry<User, Double>> sort(Map map) {
// TODO Auto-generated method stub
List<Map.Entry<User, Double>> list_Data = new ArrayList<Map.Entry<User, Double>>(
map.entrySet());
// Collections.sort(list_Data, new Comparator<Map.Entry<User, Double>>() {
// public int compare(Map.Entry<User, Double> o1,
// Map.Entry<User, String> o2) {
// double d2 = Double.valueOf(o2.getValue());
// double d1 = Double.valueOf(o1.getValue());
// if (o2.getValue() != null && o1.getValue() != null
// && o2.getValue().compareTo(o1.getValue()) > 0) {
// return 1;
// } else {
// return -1;
return list_Data;
}
@Override
protected void onDestroy() {
// TODO Auto-generated method stub
super.onDestroy();
bitmap.recycle();
}
@Override
public void initOverLay(BDLocation location) {
// TODO Auto-generated method stub
// TODO Auto-generated method stub
Map map = new HashMap<User, Integer>();
Map distanceMap = new HashMap();
double[] distances = new double[users.size()];
bitmap = BitmapDescriptorFactory.fromResource(R.drawable.icon_gcoding);
for (int i = 0; i < users.size(); i++) {
User user = users.get(i);
BDLocation userlocation = user.getLocation();
LatLng userLatLng = new LatLng(userlocation.getLatitude(),
userlocation.getLongitude());
LatLng myLatLng = new LatLng(location.getLatitude(),
location.getLongitude());
distances[i] = DistanceUtil.getDistance(userLatLng, myLatLng);
distanceMap.put(user, distances[i]);
System.out.println(""+i+"==========>>>>>>"+user.getLocation().getLatitude()+"||||"+user
.getLocation().getLongitude());
// Maker
LatLng point = new LatLng(user.getLocation().getLatitude(), user
.getLocation().getLongitude());
options[i] = new MarkerOptions().position(point).icon(bitmap);
markers[i] = (Marker) mBaiduMap.addOverlay(options[i]);
markers[i].setTitle(user.getName());
markerMap.put(markers[i], user);
}
List<Entry<User, Double>> userlist = sort(distanceMap);
// for (int i = 0; i < userlist.size(); i++) {
// User user = userlist.get(i).getKey();
// map.put(user, i);
// .getLocation().getLongitude());
// // Maker
// LatLng point = new LatLng(user.getLocation().getLatitude(), user
// .getLocation().getLongitude());
// options[i] = new MarkerOptions().position(point).icon(bitmap);
// markers[i].setTitle(user.getName());
// markerMap.put(markers[i], user);
}
}
|
package dr.evomodel.operators;
import dr.evolution.tree.NodeRef;
import dr.evomodel.tree.TreeModel;
import dr.inference.operators.OperatorFailedException;
import dr.math.MathUtils;
import dr.xml.*;
/**
* Implements branch exchange operations. There is a NARROW and WIDE variety.
* The narrow exchange is very similar to a rooted-tree nearest-neighbour
* interchange but with the restriction that node height must remain consistent.
* <p/>
* KNOWN BUGS: WIDE operator cannot be used on trees with 4 or less tips!
*/
public class ExchangeOperator extends AbstractTreeOperator {

    public static final String NARROW_EXCHANGE = "narrowExchange";

    public static final String WIDE_EXCHANGE = "wideExchange";

    public static final String INTERMEDIATE_EXCHANGE = "intermediateExchange";

    public static final int NARROW = 0;

    public static final int WIDE = 1;

    public static final int INTERMEDIATE = 2;

    // Attempt cap for the rejection-sampling loop in intermediate().
    private static final int MAX_TRIES = 100;

    private int mode = NARROW;

    private final TreeModel tree;

    // Scratch buffer of node distances, rebuilt by calcDistances().
    private double[] distances;

    /**
     * @param mode   one of NARROW, WIDE or INTERMEDIATE
     * @param tree   the tree to operate on
     * @param weight operator weight within the MCMC schedule
     */
    public ExchangeOperator(int mode, TreeModel tree, double weight) {
        this.mode = mode;
        this.tree = tree;
        setWeight(weight);
    }

    /** Human-readable name of the current mode (used in messages). */
    private String modeName() {
        switch( mode ) {
            case NARROW:
                return "Narrow";
            case WIDE:
                return "Wide";
            case INTERMEDIATE:
                return "Intermediate";
            default:
                return "Unknown";
        }
    }

    /**
     * Performs one exchange move according to the configured mode.
     *
     * @return the log Hastings ratio (0 for the symmetric NARROW/WIDE moves)
     * @throws OperatorFailedException if no valid move could be found
     */
    public double doOperation() throws OperatorFailedException {
        final int tipCount = tree.getExternalNodeCount();
        double hastingsRatio = 0;
        switch( mode ) {
            case NARROW:
                narrow();
                break;
            case WIDE:
                wide();
                break;
            case INTERMEDIATE:
                hastingsRatio = intermediate();
                break;
        }
        // Fix: the message previously labelled INTERMEDIATE as "WIDE mode.".
        assert tree.getExternalNodeCount() == tipCount :
                "Lost some tips in " + modeName() + " mode.";
        return hastingsRatio;
    }

    /**
     * Swaps a random node with its uncle (nearest-neighbour interchange
     * respecting node heights).
     * WARNING: Assumes strictly bifurcating tree.
     */
    public void narrow() throws OperatorFailedException {
        final int nNodes = tree.getNodeCount();
        final NodeRef root = tree.getRoot();
        // Pick a node that is neither the root nor a child of the root,
        // so that a grandparent exists.
        NodeRef i = root;
        while( root == i || tree.getParent(i) == root ) {
            i = tree.getNode(MathUtils.nextInt(nNodes));
        }
        final NodeRef iParent = tree.getParent(i);
        final NodeRef iGrandParent = tree.getParent(iParent);
        NodeRef iUncle = tree.getChild(iGrandParent, 0);
        if( iUncle == iParent ) {
            iUncle = tree.getChild(iGrandParent, 1);
        }
        assert iUncle == getOtherChild(tree, iGrandParent, iParent);
        assert tree.getNodeHeight(i) <= tree.getNodeHeight(iGrandParent);
        // Heights must stay consistent: the uncle must fit under iParent.
        if( tree.getNodeHeight(iUncle) < tree.getNodeHeight(iParent) ) {
            exchangeNodes(tree, i, iUncle, iParent, iGrandParent);
            // exchangeNodes generates the events
        } else {
            throw new OperatorFailedException("Couldn't find valid narrow move on this tree!!");
        }
    }

    /**
     * Swaps two random, height-compatible subtrees.
     * WARNING: Assumes strictly bifurcating tree.
     */
    public void wide() throws OperatorFailedException {
        final int nodeCount = tree.getNodeCount();
        final NodeRef root = tree.getRoot();
        NodeRef i = root;
        while( root == i ) {
            i = tree.getNode(MathUtils.nextInt(nodeCount));
        }
        NodeRef j = i;
        while( j == i || j == root ) {
            j = tree.getNode(MathUtils.nextInt(nodeCount));
        }
        final NodeRef iP = tree.getParent(i);
        final NodeRef jP = tree.getParent(j);
        // Reject siblings, parent/child pairs, and height-incompatible picks.
        if( (iP != jP) && (i != jP) && (j != iP)
                && (tree.getNodeHeight(j) < tree.getNodeHeight(iP))
                && (tree.getNodeHeight(i) < tree.getNodeHeight(jP)) ) {
            exchangeNodes(tree, i, j, iP, jP);
            return;
        }
        throw new OperatorFailedException("Couldn't find valid wide move on this tree!");
    }

    /**
     * Distance-weighted subtree exchange with a Hastings correction.
     *
     * @return the log Hastings ratio of the performed move
     * @deprecated WARNING: SHOULD NOT BE USED!
     *             WARNING: Assumes strictly bifurcating tree.
     */
    public double intermediate() throws OperatorFailedException {
        final int nodeCount = tree.getNodeCount();
        final NodeRef root = tree.getRoot();
        for(int tries = 0; tries < MAX_TRIES; ++tries) {
            NodeRef i, j;
            NodeRef[] possibleNodes;
            do {
                // get a random non-root node
                i = root;
                while( root == i ) {
                    i = tree.getNode(MathUtils.nextInt(nodeCount));
                }
                possibleNodes = tree.getNodes();
                // pick a partner, weighted by inverse topological distance
                j = getRandomNode(possibleNodes, i);
            } while( j == null || j == i || j == root );
            // Forward proposal probability: P(pick j | from i) + P(pick i | from j).
            double forward = getWinningChance(indexOf(possibleNodes, j));
            calcDistances(possibleNodes, j);
            forward += getWinningChance(indexOf(possibleNodes, i));
            final NodeRef iP = tree.getParent(i);
            final NodeRef jP = tree.getParent(j);
            // Reject siblings, parent/child pairs, and height-incompatible picks
            // (same validity conditions as wide()).
            if( (iP != jP) && (i != jP) && (j != iP)
                    && (tree.getNodeHeight(j) < tree.getNodeHeight(iP))
                    && (tree.getNodeHeight(i) < tree.getNodeHeight(jP)) ) {
                exchangeNodes(tree, i, j, iP, jP);
                // Backward proposal probability after the swap.
                calcDistances(possibleNodes, i);
                double backward = getWinningChance(indexOf(possibleNodes, j));
                calcDistances(possibleNodes, j);
                backward += getWinningChance(indexOf(possibleNodes, i));
                return Math.log(Math.min(1, (backward) / (forward)));
            }
        }
        // Fix: the message previously said "wide move" for this operator.
        throw new OperatorFailedException("Couldn't find valid intermediate move on this tree!");
    }

    /** Identity-based index lookup (NodeRefs are compared by reference). */
    private int indexOf(NodeRef[] a, NodeRef n) {
        for(int i = 0; i < a.length; i++) {
            if( a[i] == n ) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Probability of selecting the node at {@code index} under the
     * inverse-distance weighting currently stored in {@link #distances}.
     */
    private double getWinningChance(int index) {
        double sum = 0;
        for( double distance : distances ) {
            sum += (1.0 / distance);
        }
        return (1.0 / distances[index]) / sum;
    }

    /** Fills {@link #distances} with (topological distance from ref) + 1. */
    private void calcDistances(NodeRef[] nodes, NodeRef ref) {
        distances = new double[nodes.length];
        for(int i = 0; i < nodes.length; i++) {
            distances[i] = getNodeDistance(ref, nodes[i]) + 1;
        }
    }

    /**
     * Samples a node with probability proportional to the inverse of its
     * topological distance from {@code ref}.
     */
    private NodeRef getRandomNode(NodeRef[] nodes, NodeRef ref) {
        calcDistances(nodes, ref);
        double sum = 0;
        for( double distance : distances ) {
            sum += 1.0 / distance;
        }
        // Roulette-wheel selection over the inverse distances.
        double randomValue = MathUtils.nextDouble() * sum;
        NodeRef n = null;
        for(int i = 0; i < distances.length; i++) {
            randomValue -= 1.0 / distances[i];
            if( randomValue <= 0 ) {
                n = nodes[i];
                break;
            }
        }
        return n;
    }

    /**
     * Number of edges between two nodes, found by repeatedly ascending from
     * the lower of the two until they meet.
     */
    private int getNodeDistance(NodeRef i, NodeRef j) {
        int count = 0;
        while( i != j ) {
            count++;
            if( tree.getNodeHeight(i) < tree.getNodeHeight(j) ) {
                i = tree.getParent(i);
            } else {
                j = tree.getParent(j);
            }
        }
        return count;
    }

    public int getMode() {
        return mode;
    }

    public String getOperatorName() {
        // Fix: INTERMEDIATE mode was previously reported as "Wide".
        return modeName() + " Exchange" + "(" + tree.getId() + ")";
    }

    public double getMinimumAcceptanceLevel() {
        if( mode == NARROW ) {
            return 0.05;
        } else {
            return 0.01;
        }
    }

    public double getMinimumGoodAcceptanceLevel() {
        if( mode == NARROW ) {
            return 0.05;
        } else {
            return 0.01;
        }
    }

    public String getPerformanceSuggestion() {
        // No tuning advice: this operator has no tunable parameter.
        return "";
    }

    public static XMLObjectParser NARROW_EXCHANGE_PARSER = new AbstractXMLObjectParser() {

        public String getParserName() {
            return NARROW_EXCHANGE;
        }

        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
            final TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
            final double weight = xo.getDoubleAttribute("weight");
            return new ExchangeOperator(NARROW, treeModel, weight);
        }

        // AbstractXMLObjectParser implementation
        public String getParserDescription() {
            return "This element represents a narrow exchange operator. "
                    + "This operator swaps a random subtree with its uncle.";
        }

        public Class getReturnType() {
            return ExchangeOperator.class;
        }

        public XMLSyntaxRule[] getSyntaxRules() {
            return rules;
        }

        private final XMLSyntaxRule[] rules = {
                AttributeRule.newDoubleRule("weight"),
                new ElementRule(TreeModel.class)};
    };

    public static XMLObjectParser WIDE_EXCHANGE_PARSER = new AbstractXMLObjectParser() {

        public String getParserName() {
            return WIDE_EXCHANGE;
        }

        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
            final TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
            final double weight = xo.getDoubleAttribute("weight");
            return new ExchangeOperator(WIDE, treeModel, weight);
        }

        // AbstractXMLObjectParser implementation
        public String getParserDescription() {
            return "This element represents a wide exchange operator. "
                    + "This operator swaps two random subtrees.";
        }

        public Class getReturnType() {
            return ExchangeOperator.class;
        }

        public XMLSyntaxRule[] getSyntaxRules() {
            return rules;
        }

        // Consistency fix: previously declared via a separate instance
        // initializer ("rules;{ rules = ... }"), unlike the sibling parsers.
        private final XMLSyntaxRule[] rules = {
                AttributeRule.newDoubleRule("weight"),
                new ElementRule(TreeModel.class)};
    };

    public static XMLObjectParser INTERMEDIATE_EXCHANGE_PARSER = new AbstractXMLObjectParser() {

        public String getParserName() {
            return INTERMEDIATE_EXCHANGE;
        }

        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
            final TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
            final double weight = xo.getDoubleAttribute("weight");
            return new ExchangeOperator(INTERMEDIATE, treeModel, weight);
        }

        // AbstractXMLObjectParser implementation
        public String getParserDescription() {
            return "This element represents a intermediate exchange operator. "
                    + "This operator swaps two random subtrees.";
        }

        public Class getReturnType() {
            return ExchangeOperator.class;
        }

        public XMLSyntaxRule[] getSyntaxRules() {
            return rules;
        }

        private final XMLSyntaxRule[] rules = {
                AttributeRule.newDoubleRule("weight"),
                new ElementRule(TreeModel.class)};
    };
}
|
package edu.umn.csci5801.model;
import java.util.List;
public class ProgressSummary {
private Student student;
private Department department;
private Degree degree;
private Term termBegan;
private List<Professor> advisors;
private List<Professor> committee;
private List<GradReqCheck> reqCheckResult;
private List<String> notes;
//Constructor
public void ProgressSummary(Student s, Department dep, Degree deg, Term tb,
List<Professor> a, List<Professor> c, List<String> n) {
student = s;
department = dep;
degree = deg;
termBegan = tb;
advisors = a;
committee = c;
notes = n;
}
public void checkGradStatus() {
List<Requirement> requirements;
GradReqCheck gradReqCheck = new GradReqCheck();
requirements = gradReqCheck.createReqList(degree);
gradReqCheck.testReqList();
reqCheckResult = gradReqCheck.getReqChecks()
}
}
|
package edu.wustl.common.util.dbManager;
import java.sql.Connection;
import java.sql.SQLException;
import net.sf.hibernate.HibernateException;
import net.sf.hibernate.Session;
import net.sf.hibernate.SessionFactory;
import net.sf.hibernate.cfg.Configuration;
import edu.wustl.common.util.logger.Logger;
/**
 * Thread-bound Hibernate session management: one {@link Session} per thread,
 * created lazily with auto-commit disabled, plus helpers for obtaining the
 * underlying JDBC connection and for loading objects in a throwaway session.
 */
public class DBUtil
{
	// A factory for DB Session which provides the Connection for client.
	private static SessionFactory m_sessionFactory;

	// ThreadLocal to hold the Session for the current executing thread.
	private static final ThreadLocal threadLocal = new ThreadLocal();

	// Initialize the session Factory in the static block; failure here is
	// fatal, since no session can ever be created without the factory.
	static
	{
		try
		{
			Configuration cfg = new Configuration();
			m_sessionFactory = cfg.configure().buildSessionFactory();
			HibernateMetaData.initHibernateMetaData(cfg);
		}
		catch (Exception ex)
		{
			// Log with the full stack trace, then fail class initialization.
			Logger.out.debug("Exception: " + ex.getMessage(), ex);
			throw new RuntimeException(ex.getMessage());
		}
	}

	/**
	 * Follows the singleton pattern and returns only current opened session.
	 * A new session with auto-commit disabled is opened and bound to the
	 * thread on first use.
	 *
	 * @return Returns the current db session.
	 * */
	public static Session currentSession() throws HibernateException
	{
		Session s = (Session) threadLocal.get();
		// Open a new Session, if this Thread has none yet
		if (s == null)
		{
			s = m_sessionFactory.openSession();
			try
			{
				s.connection().setAutoCommit(false);
			}
			catch (SQLException ex)
			{
				throw new HibernateException(ex.getMessage(), ex);
			}
			threadLocal.set(s);
		}
		return s;
	}

	/**
	 * Close the currently opened session and unbind it from the thread.
	 * */
	public static void closeSession() throws HibernateException
	{
		Session s = (Session) threadLocal.get();
		threadLocal.set(null);
		if (s != null)
		{
			s.close();
		}
	}

	/**
	 * @return the JDBC connection of the thread's current session
	 */
	public static Connection getConnection() throws HibernateException
	{
		return currentSession().connection();
	}

	/**
	 * Closes the thread's current session (and thereby its connection).
	 */
	public static void closeConnection() throws HibernateException
	{
		closeSession();
	}

	/**
	 * This method opens a new session, loads an object with given class and Id, and closes
	 * the session. This method should be used only when an object is to be opened in separate session.
	 *
	 * @param objectClass class of the object
	 * @param identifier id of the object
	 * @return object
	 * @throws HibernateException
	 */
	public static Object loadCleanObj(Class objectClass, Long identifier) throws HibernateException
	{
		Session session = null;
		try
		{
			session = m_sessionFactory.openSession();
			return session.load(objectClass, identifier);
		}
		finally
		{
			// Fix: if openSession() threw, session is still null and the
			// unguarded close() raised an NPE that masked the real cause.
			if (session != null)
			{
				session.close();
			}
		}
	}
}
|
package edu.yalestc.yalepublic.news;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.concurrent.ExecutionException;
import edu.yalestc.yalepublic.R;
import edu.yalestc.yalepublic.news.RssReader;
import edu.yalestc.yalepublic.news.RssFeed;
/**
 * Activity that downloads the Yale news RSS feed and lists its items.
 * Items are rendered by the inner {@link NewsAdapter}.
 */
public class NewsReader extends Activity {

    TextView tRSSTitle, tRSSContent;

    // Parsed feed; null if offline or the download failed.
    RssFeed feed;

    // Check for connectivity, return true if connected or connecting.
    public boolean isOnline() {
        ConnectivityManager cm =
                (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        return cm.getActiveNetworkInfo() != null &&
                cm.getActiveNetworkInfo().isConnectedOrConnecting();
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.news_items);
        tRSSTitle = (TextView) findViewById(R.id.tvRSSTitle);
        tRSSContent = (TextView) findViewById(R.id.tvRSSContent);
        // If we're online, downloads the RSS Feed and returns it as `feed`
        // NOTE(review): calling .get() on the AsyncTask blocks the UI thread
        // until the download finishes — verify this doesn't trigger
        // NetworkOnMainThreadException / ANR on the target API level.
        if (isOnline()) {
            NewsDownload start = new NewsDownload();
            try {
                feed = start.execute("http://news.yale.edu/news-rss").get();
            } catch (InterruptedException e) {
                e.printStackTrace();
            } catch (ExecutionException e) {
                e.printStackTrace();
            }
        } else {
            Log.d("NewsReader", "Please connect to Internet");
        }
        if (feed != null) { // EHOSTUNREACH: No route to hoast
            ArrayList<RssItem> rssItems = feed.getRssItems();
            ArrayList<String> rssTitles = new ArrayList<String>();
            ArrayList<String> rssLinks = new ArrayList<String>();
            ArrayList<String> rssDescription = new ArrayList<String>();
            ArrayList<String> rssContent = new ArrayList<String>();
            /*private String title;
            private String link;
            private Date pubDate;
            private String description;
            private String content;*/
            // NOTE(review): these four lists are built but never used below;
            // the adapter consumes rssItems directly — candidates for removal.
            for (RssItem rssItem : rssItems) {
                Log.d("RSS Reader", rssItem.getTitle());
                rssTitles.add(rssItem.getTitle());
                rssLinks.add(rssItem.getLink());
                rssDescription.add(rssItem.getDescription());
                rssContent.add(rssItem.getContent());
            }
            /*String[] video_arrays = {"video1", "video2"};
            List<String> videos = new ArrayList<String>(Arrays.asList(video_arrays)); */
            // Parameters: Activity (Context), Layout file, Id of TextView, Array that's adapted
            /*final ArrayAdapter<String> mNewsAdapter;
            mNewsAdapter = new ArrayAdapter<String>(
                    this, R.layout.news_tab, R.id.tvTitle, rssTitles);*/
            //ArrayList<RssItem> rssItems = feed.getRssItems();
            //List<String> rssData = rssItems;
            // TODO: Convert ArrayList<rssItem> into an array of strings
            ListView listView = (ListView) findViewById(R.id.listNews);
            listView.setAdapter(new NewsAdapter(this, R.layout.news_tab, rssItems));
            //listView.setAdapter(mNewsAdapter);
        }
    }

    /**
     * ListView adapter that renders one RSS item per row using the
     * view-holder pattern.
     */
    public class NewsAdapter extends ArrayAdapter<RssItem> {

        private final Context context;

        private final ArrayList<RssItem> data;

        private final int layoutResourceId;

        // fix here
        public NewsAdapter(Context context, int layoutResourceId, ArrayList<RssItem> data) {
            super(context, layoutResourceId, data);
            this.context = context;
            this.data = data;
            this.layoutResourceId = layoutResourceId;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            View row = convertView;
            ViewHolder holder = null;
            if(row == null) {
                // First use of this row: inflate and cache the child views.
                LayoutInflater inflater = ((Activity)context).getLayoutInflater();
                row = inflater.inflate(layoutResourceId, parent, false);
                holder = new ViewHolder();
                holder.textView1 = (TextView) row.findViewById(R.id.tvTitle);
                holder.textView2 = (TextView) row.findViewById(R.id.tvDate);
                holder.textView3 = (TextView) row.findViewById(R.id.tvDescription);
                row.setTag(holder);
            } else {
                holder = (ViewHolder) row.getTag();
            }
            RssItem rItem = data.get(position);
            // NOTE(review): textView2 is bound to R.id.tvDate but receives the
            // item's description, and textView3 (tvDescription) receives the
            // content; the publication date is never shown. Confirm whether
            // this mapping is intentional before changing it.
            holder.textView1.setText(rItem.getTitle());
            holder.textView2.setText(rItem.getDescription());
            holder.textView3.setText(rItem.getContent());
            return row;
        }

        // Cached child views for one list row (view-holder pattern).
        private class ViewHolder {

            TextView textView1;

            TextView textView2;

            TextView textView3;
        }
    }
}
|
package org.lenskit.cli.commands;
import com.google.auto.service.AutoService;
import com.google.common.base.Stopwatch;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.Namespace;
import org.lenskit.api.RecommenderBuildException;
import org.lenskit.cli.Command;
import org.lenskit.cli.util.InputData;
import org.lenskit.cli.util.ScriptEnvironment;
import org.lenskit.eval.temporal.TemporalEvaluator;
import org.lenskit.eval.traintest.AlgorithmInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.List;
/**
 * CLI command that simulates a recommender algorithm over time using the
 * {@link TemporalEvaluator}. Loads a single algorithm from a configuration
 * file and replays the input data chronologically.
 */
@AutoService(Command.class)
public class Simulate implements Command {
    private final Logger logger = LoggerFactory.getLogger(Simulate.class);

    /**
     * Registers this command's arguments on the argparse4j parser.
     *
     * @param parser the parser to configure
     */
    public void configureArguments(ArgumentParser parser) {
        parser.description("Simulates a recommender over time");
        ScriptEnvironment.configureArguments(parser);
        InputData.configureArguments(parser, true);
        parser.addArgument("-o", "--output-file")
              .type(File.class)
              .metavar("FILE")
              // FIX: the default must be a File, not a String. argparse4j does not
              // run type conversion on defaults, and Context casts the value to
              // File, so a String default threw ClassCastException when -o was omitted.
              .setDefault(new File("predictions.csv"))
              .help("write predictions and errors to FILE");
        parser.addArgument("--extended-output")
              .type(File.class)
              .metavar("FILE")
              // FIX: File default for the same reason as --output-file.
              .setDefault(new File("extended-output.txt"))
              .help("write extended output as JSON lines in FILE");
        parser.addArgument("-n", "--list-size")
              .type(Integer.class)
              .metavar("INTEGER")
              .setDefault(10)
              .help("Length of recommendation lists");
        parser.addArgument("-r", "--rebuild-period")
              .type(Long.class)
              .setDefault(86400L)
              .metavar("SECONDS")
              .help("Rebuild Period for next build");
        parser.addArgument("config")
              .type(File.class)
              .metavar("CONFIG")
              .nargs("?")
              .help("load algorithm configuration from CONFIG");
    }

    @Override
    public String getName() {
        return "simulate";
    }

    @Override
    public String getHelp() {
        return "Simulate a recommender algorithm over time";
    }

    /**
     * Runs the temporal simulation: builds the evaluator from the parsed
     * options, loads the single configured algorithm, and executes it.
     *
     * @param opts the parsed command-line options
     * @throws IOException               if input/output files cannot be accessed
     * @throws RecommenderBuildException if the recommender cannot be built
     * @throws IllegalArgumentException  if no config file was given or it does not
     *                                   define exactly one algorithm
     */
    @Override
    public void execute(Namespace opts) throws IOException, RecommenderBuildException {
        Context ctx = new Context(opts);
        ScriptEnvironment environment = new ScriptEnvironment(opts);
        InputData input = new InputData(environment, opts);
        TemporalEvaluator eval = new TemporalEvaluator();
        eval.setListSize(ctx.getListSize());
        eval.setRebuildPeriod(ctx.getRebuildPeriod());
        eval.setDataSource(input.getDAO());
        File out = ctx.getOutputFile();
        if (out != null) {
            eval.setOutputFile(out);
        }
        out = ctx.getExtendedOutputFile();
        if (out != null) {
            eval.setExtendedOutputFile(out.toPath());
        }
        // FIX: CONFIG is optional (nargs "?"); fail with a clear message instead
        // of an NPE on getConfigFile().toPath() when it is missing.
        File configFile = ctx.getConfigFile();
        if (configFile == null) {
            throw new IllegalArgumentException("no algorithm configuration file specified");
        }
        List<AlgorithmInstance> algos = AlgorithmInstance.load(configFile.toPath(), "algorithm",
                                                               environment.getClassLoader());
        if (algos.size() != 1) {
            // FIX: the old message claimed "too many algorithms" even when zero were found.
            logger.error("expected exactly 1 algorithm, found {}", algos.size());
            throw new IllegalArgumentException("expected exactly one algorithm, found " + algos.size());
        }
        eval.setAlgorithm(algos.get(0));
        Stopwatch timer = Stopwatch.createStarted();
        logger.info("beginning temporal evaluator");
        eval.execute();
        timer.stop();
        logger.info("evaluator executed in {}", timer);
    }

    /**
     * Thin typed accessor over the parsed argparse4j {@link Namespace}.
     */
    private static class Context {
        private final Namespace options;

        public Context(Namespace opts) {
            options = opts;
        }

        /** @return the prediction output file (has a default, so never null) */
        public File getOutputFile() {
            return options.get("output_file");
        }

        /** @return the extended-output file (has a default, so never null) */
        public File getExtendedOutputFile() {
            return options.get("extended_output");
        }

        /** @return the configuration file, or {@code null} if none was given */
        public File getConfigFile() {
            return options.get("config");
        }

        /** @return the model rebuild period, in seconds */
        public long getRebuildPeriod() {
            return options.get("rebuild_period");
        }

        /** @return the recommendation list length */
        public int getListSize() {
            return options.get("list_size");
        }
    }
}
|
package br.senac.tads.pi3a.controller;
import br.senac.tads.pi3a.dao.DaoFuncionario;
import br.senac.tads.pi3a.inputFilter.InputFilterFuncionario;
import br.senac.tads.pi3a.model.Funcionario;
import br.senac.tads.pi3a.model.Model;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
 * Controller for employee (Funcionario) CRUD operations: create, edit,
 * delete and search. Communicates results back to the view through the
 * session attributes {@code "alert"} (Bootstrap CSS class) and
 * {@code "alertMessage"} (user-facing text).
 *
 * @author Fillipe
 */
public class ControllerFuncionario implements Logica {

    /**
     * Handles creation of a new employee. On POST, validates the input,
     * rejects duplicate CPFs, and inserts the record.
     */
    @Override
    public String novo(HttpServletRequest request, HttpServletResponse response,
            HttpSession session) throws Exception {
        try {
            if (request.getMethod().equalsIgnoreCase("post")) {
                InputFilterFuncionario inputFilterFuncionario
                        = new InputFilterFuncionario(request.getParameterMap());
                Funcionario funcionario = (Funcionario) inputFilterFuncionario.getData();
                if (inputFilterFuncionario.isValid()) {
                    funcionario = (Funcionario) inputFilterFuncionario.createModel();
                    DaoFuncionario dao = new DaoFuncionario(funcionario);
                    // Only insert when no record with the same CPF exists.
                    if (dao.findAll(funcionario, "cpf", "=", funcionario.getCpf()).isEmpty()) {
                        funcionario.setStatus(true);
                        if (dao.insert() != -1) {
                            // FIX: was "alert-sucess" (typo, broke the CSS class)
                            // and "alterMessage" (wrong key, message never shown).
                            session.setAttribute("alert", "alert-success");
                            session.setAttribute("alertMessage",
                                    "Cadastro realizado com sucesso.");
                            return "novo";
                        }
                    } else {
                        session.setAttribute("funcionario", funcionario);
                        session.setAttribute("alert", "alert-danger");
                        session.setAttribute("alertMessage",
                                "Este CPF já está cadastrado.");
                    }
                } else {
                    session.setAttribute("errorValidation",
                            inputFilterFuncionario.getErrorValidation());
                    session.setAttribute("funcionario", funcionario);
                    session.setAttribute("alert", "alert-danger");
                    // FIX: the second setAttribute used the "alert" key again,
                    // overwriting the CSS class; the text belongs in "alertMessage".
                    session.setAttribute("alertMessage",
                            "Verifique os campos em vermelho.");
                }
            }
            return "/WEB-INF/jsp/cadastrar-funcionario.jsp";
        } catch (Exception e) {
            e.printStackTrace(System.err);
            session.setAttribute("alert", "alert-danger");
            // FIX: was "alterMessage" (wrong key).
            session.setAttribute("alertMessage",
                    "Não foi possível realizar o cadastro.");
            return "novo";
        }
    }

    /**
     * Handles editing an employee. On POST, validates the input and updates
     * the record when the CPF matches the record being edited. On GET with an
     * {@code id} parameter, loads the employee into the session for display.
     */
    @Override
    public String editar(HttpServletRequest request,
            HttpServletResponse response, HttpSession session)
            throws Exception {
        try {
            if (request.getMethod().equalsIgnoreCase("post")) {
                InputFilterFuncionario inputFilterFuncionario
                        = new InputFilterFuncionario(request.getParameterMap());
                Funcionario funcionario = (Funcionario) inputFilterFuncionario.getData();
                if (inputFilterFuncionario.isValid()) {
                    funcionario = (Funcionario) inputFilterFuncionario.createModel();
                    DaoFuncionario dao = new DaoFuncionario(funcionario);
                    List<Model> lista = dao.findAll(funcionario, "cpf", "=",
                            funcionario.getCpf());
                    if (lista.size() == 1) {
                        // The CPF must belong to the record being edited,
                        // otherwise it is already taken by someone else.
                        if (lista.get(0).getId() == funcionario.getId()) {
                            if (dao.update()) {
                                session.setAttribute("alert", "alert-success");
                                session.setAttribute("alertMessage",
                                        "Cadastro alterado com sucesso.");
                                session.setAttribute("id", funcionario.getId());
                                return "editar";
                            }
                        } else {
                            session.setAttribute("funcionario", funcionario);
                            session.setAttribute("alert", "alert-danger");
                            session.setAttribute("alertMessage",
                                    "Este CPF já está cadastrado.");
                        }
                    } else {
                        session.setAttribute("funcionario", funcionario);
                        session.setAttribute("alert", "alert-danger");
                        session.setAttribute("alertMessage",
                                "Não foi encontrado nenhum cadastro com o CPF"
                                + " informado.");
                    }
                } else {
                    session.setAttribute("errorValidation",
                            inputFilterFuncionario.getErrorValidation());
                    session.setAttribute("funcionario", funcionario);
                    session.setAttribute("alert", "alert-danger");
                    // FIX: grammar — "os campo" -> "os campos" (matches novo()).
                    session.setAttribute("alertMessage",
                            "Verifique os campos em vermelho.");
                }
            }
            if (request.getParameter("id") != null) {
                String id = request.getParameter("id");
                if (isDigitsOnly(id)) {
                    Model funcionario = new Funcionario();
                    DaoFuncionario dao = new DaoFuncionario();
                    funcionario = dao.findOne(funcionario, Integer.valueOf(request
                            .getParameter("id")));
                    session.setAttribute("funcionario", funcionario);
                }
            }
            return "/WEB-INF/jsp/cadastrar-funcionario.jsp";
        } catch (Exception e) {
            e.printStackTrace(System.err);
            session.setAttribute("alert", "alert-danger");
            session.setAttribute("alertMessage",
                    "Não foi possível realizar a alteração.");
            session.setAttribute("id", 0);
            return "editar";
        }
    }

    /**
     * Handles deletion of an employee identified by the {@code id} parameter.
     */
    @Override
    public String excluir(HttpServletRequest request,
            HttpServletResponse response, HttpSession session)
            throws Exception {
        try {
            if (request.getParameter("id") != null) {
                String id = request.getParameter("id");
                if (isDigitsOnly(id)) {
                    Funcionario funcionario = new Funcionario();
                    DaoFuncionario dao = new DaoFuncionario(funcionario);
                    if (dao.delete(Integer.valueOf(id))) {
                        session.setAttribute("alert", "alert-warning");
                        session.setAttribute("alertMessage",
                                "Cadastro excluído com sucesso.");
                        return "excluir";
                    }
                }
            }
            return "/WEB-INF/jsp/cadastrar-funcionario.jsp";
        } catch (Exception e) {
            e.printStackTrace(System.err);
            session.setAttribute("alert", "alert-danger");
            session.setAttribute("alertMessage",
                    "Não foi possível realizar a exclusão.");
            return "excluir";
        }
    }

    /**
     * Handles searching. An 11-digit search term is treated as a CPF (exact
     * match); anything else is matched against the name with LIKE. An empty
     * term lists all employees.
     */
    @Override
    public String pesquisar(HttpServletRequest request,
            HttpServletResponse response, HttpSession session)
            throws Exception {
        try {
            if (request.getMethod().equalsIgnoreCase("post")) {
                Funcionario funcionario = new Funcionario();
                DaoFuncionario dao = new DaoFuncionario();
                List<Model> lista;
                if (request.getParameter("pesquisar") != null
                        && !request.getParameter("pesquisar").isEmpty()) {
                    String pesquisar = request.getParameter("pesquisar");
                    if (isDigitsOnly(pesquisar) && pesquisar.length() == 11) {
                        lista = dao.findAll(funcionario, "cpf", "=", pesquisar);
                    } else {
                        lista = dao.findAll(funcionario, "nome", "LIKE",
                                "%" + pesquisar + "%");
                    }
                } else {
                    lista = dao.findAll(funcionario);
                }
                if (lista != null && !lista.isEmpty()) {
                    session.setAttribute("listaFuncionarios", lista);
                    return "pesquisar";
                } else {
                    session.setAttribute("alert", "alert-warning");
                    session.setAttribute("alertMessage",
                            "A consulta não retornou nenhum resultado.");
                }
            }
            return "/WEB-INF/jsp/consultar-funcionario.jsp";
        } catch (Exception e) {
            e.printStackTrace(System.err);
            session.setAttribute("alert", "alert-danger");
            session.setAttribute("alertMessage",
                    "Não foi possível realizar a consulta.");
            return "pesquisar";
        }
    }

    /**
     * Returns true when every character of {@code s} is a decimal digit.
     * Vacuously true for the empty string, matching the original inline
     * checks (an empty id then fails later in Integer.valueOf and is handled
     * by the surrounding catch).
     */
    private static boolean isDigitsOnly(String s) {
        for (int i = 0; i < s.length(); i++) {
            if (!Character.isDigit(s.charAt(i))) {
                return false;
            }
        }
        return true;
    }
}
|
package com.intellij.ide.lightEdit;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.EditorHighlighterFactory;
import com.intellij.openapi.editor.impl.EditorImpl;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.ex.FileEditorProviderManager;
import com.intellij.openapi.fileEditor.impl.EditorHistoryManager;
import com.intellij.openapi.fileTypes.PlainTextFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.util.EventDispatcher;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
/**
 * Manages the editors opened in Light Edit mode: creates, tracks, closes and
 * disposes them, and notifies registered {@link LightEditorListener}s of
 * lifecycle events.
 */
public final class LightEditorManagerImpl implements LightEditorManager, Disposable {
    private static final Logger LOG = Logger.getInstance(LightEditorManagerImpl.class);

    // CopyOnWriteArrayList: the list is read/iterated far more often than mutated.
    private final List<LightEditorInfo> myEditors = new CopyOnWriteArrayList<>();
    private final EventDispatcher<LightEditorListener> myEventDispatcher =
            EventDispatcher.create(LightEditorListener.class);
    private final LightEditServiceImpl myLightEditService;

    // When set to true on a Document, implicit (automatic) saving is suppressed
    // for that document; see isImplicitSaveAllowed().
    final static Key<Boolean> NO_IMPLICIT_SAVE = Key.create("light.edit.no.implicit.save");
    // Base name for editors created without a backing file; see getUniqueName().
    private final static String DEFAULT_FILE_NAME = "untitled_";

    public LightEditorManagerImpl(LightEditServiceImpl service) {
        myLightEditService = service;
    }

    // Creates a file editor for the given file, restores its previously saved
    // state from the editor history, registers it in myEditors and fires the
    // fileOpened event. Returns null when no editor provider accepts the file.
    private @Nullable LightEditorInfo doCreateEditor(@NotNull VirtualFile file) {
        Project project = Objects.requireNonNull(LightEditUtil.getProject());
        Pair<FileEditorProvider, FileEditor> pair = createFileEditor(project, file);
        if (pair == null) {
            return null;
        }
        LightEditorInfo editorInfo = new LightEditorInfoImpl(pair.first, pair.second, file);
        ObjectUtils.consumeIfNotNull(EditorHistoryManager.getInstance(project).getState(file, pair.first),
                state -> editorInfo.getFileEditor().setState(state));
        ObjectUtils.consumeIfCast(LightEditorInfoImpl.getEditor(editorInfo), EditorImpl.class,
                editorImpl -> editorImpl.setDropHandler(new LightEditDropHandler()));
        myEditors.add(editorInfo);
        project.getMessageBus().syncPublisher(FileEditorManagerListener.FILE_EDITOR_MANAGER).fileOpened(
                FileEditorManager.getInstance(project), file
        );
        return editorInfo;
    }

    // Returns an editor from the first provider that applies to the file,
    // or null when no provider is available.
    private static @Nullable Pair<FileEditorProvider, FileEditor> createFileEditor(@NotNull Project project, @NotNull VirtualFile file) {
        FileEditorProvider[] providers = FileEditorProviderManager.getInstance().getProviders(project, file);
        for (FileEditorProvider provider : providers) {
            FileEditor editor = provider.createEditor(project, file);
            return Pair.create(provider, editor);
        }
        return null;
    }

    /**
     * Create an empty editor without any file type assigned (defaults to plain text).
     *
     * @return The newly created editor info.
     */
    @Override
    public @NotNull LightEditorInfo createEditor() {
        LightVirtualFile file = new LightVirtualFile(getUniqueName());
        file.setFileType(PlainTextFileType.INSTANCE);
        return Objects.requireNonNull(doCreateEditor(file));
    }

    // Creates an editor for an existing file. Implicit save is disabled for the
    // document, and a syntax highlighter matching the file is installed.
    @Override
    public @Nullable LightEditorInfo createEditor(@NotNull VirtualFile file) {
        LightEditFileTypeOverrider.markUnknownFileTypeAsPlainText(file);
        setImplicitSaveEnabled(file, false);
        LightEditorInfo editorInfo = doCreateEditor(file);
        Editor editor = LightEditorInfoImpl.getEditor(editorInfo);
        if (editor instanceof EditorEx) ((EditorEx)editor).setHighlighter(getHighlighter(file, editor));
        return editorInfo;
    }

    // Toggles the NO_IMPLICIT_SAVE marker on the file's document
    // (null user data when enabled, TRUE when disabled).
    private static void setImplicitSaveEnabled(@NotNull VirtualFile file, boolean isEnabled) {
        Document document = FileDocumentManager.getInstance().getDocument(file);
        if (document != null) {
            document.putUserData(NO_IMPLICIT_SAVE, isEnabled ? null : true);
        }
    }

    @Override
    public void dispose() {
        releaseEditors();
    }

    // Disposes every tracked editor and clears the registry.
    public void releaseEditors() {
        myEditors.forEach(editorInfo -> ((LightEditorInfoImpl)editorInfo).disposeEditor());
        myEditors.clear();
    }

    // Persists the editor's history entry, unregisters it, re-enables implicit
    // save for its document, disposes it, and notifies listeners.
    @Override
    public void closeEditor(@NotNull LightEditorInfo editorInfo) {
        EditorHistoryManager.getInstance(myLightEditService.getOrCreateProject()).updateHistoryEntry(editorInfo.getFile(), false);
        myEditors.remove(editorInfo);
        setImplicitSaveEnabled(editorInfo.getFile(), true);
        ((LightEditorInfoImpl)editorInfo).disposeEditor();
        myEventDispatcher.getMulticaster().afterClose(editorInfo);
    }

    @Override
    public void addListener(@NotNull LightEditorListener listener) {
        myEventDispatcher.addListener(listener);
    }

    @Override
    public void addListener(@NotNull LightEditorListener listener, @NotNull Disposable parent) {
        myEventDispatcher.addListener(listener, parent);
    }

    void fireEditorSelected(@Nullable LightEditorInfo editorInfo) {
        myEventDispatcher.getMulticaster().afterSelect(editorInfo);
    }

    void fireAutosaveModeChanged(boolean autosaveMode) {
        myEventDispatcher.getMulticaster().autosaveModeChanged(autosaveMode);
    }

    void fireFileStatusChanged(@NotNull Collection<LightEditorInfo> editorInfos) {
        myEventDispatcher.getMulticaster().fileStatusChanged(editorInfos);
    }

    @NotNull
    private static EditorHighlighter getHighlighter(@NotNull VirtualFile file, @NotNull Editor editor) {
        return EditorHighlighterFactory.getInstance().createEditorHighlighter(file, editor.getColorsScheme(), null);
    }

    int getEditorCount() {
        return myEditors.size();
    }

    // Finds an open editor by file *path* (unlike isFileOpen/getEditorInfo,
    // which compare VirtualFile identity).
    @Nullable
    public LightEditorInfo findOpen(@NotNull VirtualFile file) {
        return myEditors.stream()
                .filter(editorInfo -> file.getPath().equals(editorInfo.getFile().getPath()))
                .findFirst().orElse(null);
    }

    // Implicit save is allowed either globally (autosave mode) or when the
    // document is not marked with NO_IMPLICIT_SAVE.
    @Override
    public boolean isImplicitSaveAllowed(@NotNull Document document) {
        return LightEditService.getInstance().isAutosaveMode() ||
               !ObjectUtils.notNull(document.getUserData(NO_IMPLICIT_SAVE), false);
    }

    @Override
    @NotNull
    public Collection<VirtualFile> getOpenFiles() {
        return myEditors.stream().map(info -> info.getFile()).collect(Collectors.toSet());
    }

    @Override
    public boolean isFileOpen(@NotNull VirtualFile file) {
        return myEditors.stream().anyMatch(editorInfo -> file.equals(editorInfo.getFile()));
    }

    @Override
    public boolean containsUnsavedDocuments() {
        return myEditors.stream().anyMatch(editorInfo -> editorInfo.isUnsaved());
    }

    @NotNull
    List<LightEditorInfo> getUnsavedEditors() {
        return ContainerUtil.filter(myEditors, editorInfo -> editorInfo.isUnsaved());
    }

    // Returns the first "untitled_<i>" name not used by any open editor.
    private String getUniqueName() {
        for (int i = 1; ; i++) {
            String candidate = DEFAULT_FILE_NAME + i;
            if (myEditors.stream().noneMatch(editorInfo -> editorInfo.getFile().getName().equals(candidate))) {
                return candidate;
            }
        }
    }

    // Opens an editor for targetFile, copies the source document's text into it
    // and saves. Returns the new editor info, or the original info when the
    // target editor could not be created.
    @Override
    @NotNull
    public LightEditorInfo saveAs(@NotNull LightEditorInfo info, @NotNull VirtualFile targetFile) {
        LightEditorInfo newInfo = createEditor(targetFile);
        if (newInfo != null) {
            ApplicationManager.getApplication().runWriteAction(() -> {
                FileDocumentManager manager = FileDocumentManager.getInstance();
                Document source = manager.getDocument(info.getFile());
                Document target = manager.getDocument(targetFile);
                if (source == null) {
                    LOG.error("Cannot save to " + targetFile + ": no document found for " + info.getFile());
                    return;
                }
                if (target == null) {
                    LOG.error("Cannot save to " + targetFile + ": no document found for " + targetFile);
                    return;
                }
                targetFile.refresh(false, false); // to avoid memory-disk conflict if target file was changed externally
                target.setText(source.getCharsSequence());
                manager.saveDocument(target);
            });
            return newInfo;
        }
        return info;
    }

    // Finds an open editor by VirtualFile identity (equals), or null.
    @Nullable
    LightEditorInfo getEditorInfo(@NotNull VirtualFile file) {
        return myEditors.stream().filter(editorInfo -> file.equals(editorInfo.getFile())).findFirst().orElse(null);
    }
}
|
package com.intellij.openapi.actionSystem.impl;
import com.intellij.ide.DataManager;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.impl.actionholder.ActionRef;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.ui.JBPopupMenu;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.ui.ComponentUtil;
import com.intellij.ui.components.JBMenu;
import com.intellij.ui.mac.foundation.NSDefaults;
import com.intellij.ui.plaf.beg.IdeaMenuUI;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.SingleAlarm;
import com.intellij.util.ui.JBSwingUtilities;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.MenuEvent;
import javax.swing.event.MenuListener;
import java.awt.*;
import java.awt.event.AWTEventListener;
import java.awt.event.ComponentEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
/**
 * A Swing menu backed by an {@link ActionGroup}: its items are (re)built from
 * the group each time it is opened, and its presentation (text, icon, mnemonic,
 * enabled/visible state) is kept in sync with the group's {@link Presentation}.
 * Contains several workarounds for macOS system-menu behavior.
 */
public final class ActionMenu extends JBMenu {
    // When true (macOS system menu + registry flag), submenu items are kept
    // after the menu closes instead of being cleared; see onMenuHidden().
    private static final boolean KEEP_MENU_HIERARCHY = SystemInfo.isMacSystemMenu && Registry.is("keep.menu.hierarchy", false);
    private final String myPlace;
    private DataContext myContext;
    private final ActionRef<ActionGroup> myGroup;
    private final PresentationFactory myPresentationFactory;
    private final Presentation myPresentation;
    private boolean myMnemonicEnabled;
    private MenuItemSynchronizer myMenuItemSynchronizer;
    private StubItem myStubItem;  // A PATCH!!! Do not remove this code, otherwise you will lose all keyboard navigation in JMenuBar.
    private final boolean myUseDarkIcons;
    // Lifetime of the UsabilityHelper created while the menu is selected.
    private Disposable myDisposable;

    public ActionMenu(final DataContext context,
                      @NotNull final String place,
                      final ActionGroup group,
                      final PresentationFactory presentationFactory,
                      final boolean enableMnemonics,
                      final boolean useDarkIcons
    ) {
        myContext = context;
        myPlace = place;
        myGroup = ActionRef.fromAction(group);
        myPresentationFactory = presentationFactory;
        myPresentation = myPresentationFactory.getPresentation(group);
        myMnemonicEnabled = enableMnemonics;
        myUseDarkIcons = useDarkIcons;
        updateUI();
        init();
        // addNotify won't be called for menus in MacOS system menu
        if (SystemInfo.isMacSystemMenu) {
            installSynchronizer();
        }
        // Triggering initialization of private field "popupMenu" from JMenu with our own JBPopupMenu
        getPopupMenu();
    }

    @Override
    protected Graphics getComponentGraphics(Graphics graphics) {
        if (!(getParent() instanceof JMenuBar)) return super.getComponentGraphics(graphics);
        return JBSwingUtilities.runGlobalCGTransform(this, super.getComponentGraphics(graphics));
    }

    public void updateContext(DataContext context) {
        myContext = context;
    }

    public AnAction getAnAction() { return myGroup.getAction(); }

    @Override
    public void addNotify() {
        super.addNotify();
        installSynchronizer();
    }

    // Subscribes a MenuItemSynchronizer to both the action and its presentation
    // so property changes are reflected in this menu. Idempotent.
    private void installSynchronizer() {
        if (myMenuItemSynchronizer == null) {
            myMenuItemSynchronizer = new MenuItemSynchronizer();
            myGroup.getAction().addPropertyChangeListener(myMenuItemSynchronizer);
            myPresentation.addPropertyChangeListener(myMenuItemSynchronizer);
        }
    }

    @Override
    public void removeNotify() {
        uninstallSynchronizer();
        super.removeNotify();
        if (myDisposable != null) {
            Disposer.dispose(myDisposable);
            myDisposable = null;
        }
    }

    private void uninstallSynchronizer() {
        if (myMenuItemSynchronizer != null) {
            myGroup.getAction().removePropertyChangeListener(myMenuItemSynchronizer);
            myPresentation.removePropertyChangeListener(myMenuItemSynchronizer);
            myMenuItemSynchronizer = null;
        }
    }

    // Our JBPopupMenu replacement injected into JMenu's private "popupMenu" field.
    private JPopupMenu mySpecialMenu;

    @Override
    public JPopupMenu getPopupMenu() {
        if (mySpecialMenu == null) {
            mySpecialMenu = new JBPopupMenu();
            mySpecialMenu.setInvoker(this);
            popupListener = createWinListener(mySpecialMenu);
            // Reflection: JMenu has no setter for its private popupMenu field.
            ReflectionUtil.setField(JMenu.class, this, JPopupMenu.class, "popupMenu", mySpecialMenu);
        }
        return super.getPopupMenu();
    }

    @Override
    public void updateUI() {
        setUI(IdeaMenuUI.createUI(this));
        setFont(UIUtil.getMenuFont());
        JPopupMenu popupMenu = getPopupMenu();
        if (popupMenu != null) {
            popupMenu.updateUI();
        }
    }

    // Initial configuration from the presentation; also installs the stub item
    // (except for the macOS system main menu) and the menu listener.
    private void init() {
        boolean macSystemMenu = SystemInfo.isMacSystemMenu && myPlace.equals(ActionPlaces.MAIN_MENU);
        myStubItem = macSystemMenu ? null : new StubItem();
        addStubItem();
        setBorderPainted(false);
        MenuListenerImpl menuListener = new MenuListenerImpl();
        addMenuListener(menuListener);
        getModel().addChangeListener(menuListener);
        setVisible(myPresentation.isVisible());
        setEnabled(myPresentation.isEnabled());
        setText(myPresentation.getText());
        updateIcon();
        setMnemonicEnabled(myMnemonicEnabled);
    }

    private void addStubItem() {
        if (myStubItem != null) {
            add(myStubItem);
        }
    }

    public void setMnemonicEnabled(boolean enable) {
        myMnemonicEnabled = enable;
        setMnemonic(myPresentation.getMnemonic());
        setDisplayedMnemonicIndex(myPresentation.getDisplayedMnemonicIndex());
    }

    @Override
    public void setDisplayedMnemonicIndex(final int index) throws IllegalArgumentException {
        // -1 suppresses the mnemonic underline when mnemonics are disabled.
        super.setDisplayedMnemonicIndex(myMnemonicEnabled ? index : -1);
    }

    @Override
    public void setMnemonic(int mnemonic) {
        super.setMnemonic(myMnemonicEnabled ? mnemonic : 0);
    }

    // Applies the presentation's icon/disabled icon, respecting the
    // "show icons in menus" setting and the macOS alignment options.
    private void updateIcon() {
        UISettings settings = UISettings.getInstanceOrNull();
        if (settings != null && settings.getShowIconsInMenus()) {
            final Presentation presentation = myPresentation;
            Icon icon = presentation.getIcon();
            if (SystemInfo.isMacSystemMenu && ActionPlaces.MAIN_MENU.equals(myPlace) && icon != null) {
                // JDK can't paint correctly our HiDPI icons at the system menu bar
                icon = IconLoader.getMenuBarIcon(icon, myUseDarkIcons);
            }
            if (isShowIcons()) {
                // NOTE(review): despite the name, isShowIcons() true means the
                // "No icons" option is active, so icons are cleared here.
                setIcon(null);
                setDisabledIcon(null);
            } else {
                setIcon(icon);
                if (presentation.getDisabledIcon() != null) {
                    setDisabledIcon(presentation.getDisabledIcon());
                }
                else {
                    setDisabledIcon(icon == null ? null : IconLoader.getDisabledIcon(icon));
                }
            }
        }
    }

    static boolean isShowIcons() {
        return SystemInfo.isMac && Registry.get("ide.macos.main.menu.alignment.options").isOptionEnabled("No icons");
    }

    static boolean isAligned() {
        return SystemInfo.isMac && Registry.get("ide.macos.main.menu.alignment.options").isOptionEnabled("Aligned");
    }

    static boolean isAlignedInGroup() {
        return SystemInfo.isMac && Registry.get("ide.macos.main.menu.alignment.options").isOptionEnabled("Aligned in group");
    }

    @Override
    public void menuSelectionChanged(boolean isIncluded) {
        super.menuSelectionChanged(isIncluded);
        showDescriptionInStatusBar(isIncluded, this, myPresentation.getDescription());
    }

    // Shows (or clears) the action description in the status bar of the
    // IdeFrame containing the given component.
    public static void showDescriptionInStatusBar(boolean isIncluded, Component component, String description) {
        IdeFrame frame = (IdeFrame)(component instanceof IdeFrame ? component : SwingUtilities.getAncestorOfClass(IdeFrame.class, component));
        StatusBar statusBar;
        if (frame != null && (statusBar = frame.getStatusBar()) != null) {
            statusBar.setInfo(isIncluded ? description : null);
        }
    }

    // Tracks selection via the button model and fills/clears the menu content
    // accordingly. MenuListener's own selected/deselected callbacks are ignored
    // to avoid recursive notifications (see stateChanged).
    private class MenuListenerImpl implements ChangeListener, MenuListener {
        boolean isSelected = false;
        // True while items were intentionally left in place after hiding
        // (macOS deferred clearing); forces a clear on the next selection.
        boolean myIsHidden = false;

        @Override
        public void stateChanged(ChangeEvent e) {
            // Re-implement javax.swing.JMenu.MenuChangeListener to avoid recursive event notifications
            // if 'menuSelected' fires unrelated 'stateChanged' event, without changing 'model.isSelected()' value.
            ButtonModel model = (ButtonModel)e.getSource();
            boolean modelSelected = model.isSelected();
            if (modelSelected != isSelected) {
                isSelected = modelSelected;
                if (modelSelected) {
                    menuSelected();
                }
                else {
                    menuDeselected();
                }
            }
        }

        @Override
        public void menuCanceled(MenuEvent e) {
            onMenuHidden();
        }

        @Override
        public void menuDeselected(MenuEvent e) {
            // Use ChangeListener instead to guard against recursive calls
        }

        @Override
        public void menuSelected(MenuEvent e) {
            // Use ChangeListener instead to guard against recursive calls
        }

        private void menuDeselected() {
            if (myDisposable != null) {
                Disposer.dispose(myDisposable);
                myDisposable = null;
            }
            onMenuHidden();
        }

        private void onMenuHidden() {
            if (KEEP_MENU_HIERARCHY) {
                return;
            }
            Runnable clearSelf = () -> {
                clearItems();
                addStubItem();
            };
            if (SystemInfo.isMacSystemMenu && myPlace.equals(ActionPlaces.MAIN_MENU)) {
                // Menu items may contain mnemonic and they can affect key-event dispatching (when Alt pressed)
                // To avoid influence of mnemonic it's necessary to clear items when menu was hidden.
                // When user selects item of system menu (under MacOs) AppKit generates such sequence: CloseParentMenu -> PerformItemAction
                // So we can destroy menu-item before item's action performed, and because of that action will not be executed.
                // Defer clearing to avoid this problem.
                Disposable listenerHolder = Disposer.newDisposable();
                Disposer.register(ApplicationManager.getApplication(), listenerHolder);
                IdeEventQueue.getInstance().addDispatcher(e -> {
                    if (e instanceof KeyEvent) {
                        if (myIsHidden) {
                            clearSelf.run();
                        }
                        ApplicationManager.getApplication().invokeLater(() -> Disposer.dispose(listenerHolder));
                    }
                    return false;
                }, listenerHolder);
                myIsHidden = true;
            }
            else {
                clearSelf.run();
            }
        }

        private void menuSelected() {
            UsabilityHelper helper = new UsabilityHelper(ActionMenu.this);
            if (myDisposable == null) {
                myDisposable = Disposer.newDisposable();
            }
            Disposer.register(myDisposable, helper);
            if (KEEP_MENU_HIERARCHY || myIsHidden) {
                clearItems();
            }
            myIsHidden = false;
            fillMenu();
        }
    }

    // Removes all menu components, first recursively tearing down child
    // ActionMenus (macOS system menu only, where hideNotify is not called).
    public void clearItems() {
        if (SystemInfo.isMacSystemMenu && myPlace.equals(ActionPlaces.MAIN_MENU)) {
            for (Component menuComponent : getMenuComponents()) {
                if (menuComponent instanceof ActionMenu) {
                    ((ActionMenu)menuComponent).clearItems();
                    // hideNotify is not called on Macs
                    ((ActionMenu)menuComponent).uninstallSynchronizer();
                }
                else if (menuComponent instanceof ActionMenuItem) {
                    // Looks like an old-fashioned ugly workaround
                    // JDK 1.7 on Mac works wrong with such functional keys
                    if (!SystemInfo.isMac) {
                        ((ActionMenuItem)menuComponent).setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F24, 0));
                    }
                }
            }
        }
        removeAll();
        validate();
    }

    // Populates the menu from the action group, resolving a DataContext from
    // the stored context or, failing that, from the current focus owner.
    public void fillMenu() {
        DataContext context;
        if (myContext != null) {
            context = myContext;
        }
        else {
            DataManager dataManager = DataManager.getInstance();
            @SuppressWarnings("deprecation") DataContext contextFromFocus = dataManager.getDataContext();
            context = contextFromFocus;
            if (PlatformDataKeys.CONTEXT_COMPONENT.getData(context) == null) {
                IdeFrame frame = ComponentUtil.getParentOfType((Class<? extends IdeFrame>)IdeFrame.class, (Component)this);
                context = dataManager.getDataContext(IdeFocusManager.getGlobalInstance().getLastFocusedFor((Window)frame));
            }
        }
        final boolean isDarkMenu = SystemInfo.isMacSystemMenu && NSDefaults.isDarkMenuBar();
        Utils.fillMenu(myGroup.getAction(), this, myMnemonicEnabled, myPresentationFactory, context, myPlace, true, LaterInvocator.isInModalContext(), isDarkMenu);
    }

    // Mirrors Presentation property changes into this menu's Swing state.
    private class MenuItemSynchronizer implements PropertyChangeListener {
        @Override
        public void propertyChange(PropertyChangeEvent e) {
            String name = e.getPropertyName();
            if (Presentation.PROP_VISIBLE.equals(name)) {
                setVisible(myPresentation.isVisible());
                if (SystemInfo.isMacSystemMenu && myPlace.equals(ActionPlaces.MAIN_MENU)) {
                    validate();
                }
            }
            else if (Presentation.PROP_ENABLED.equals(name)) {
                setEnabled(myPresentation.isEnabled());
            }
            else if (Presentation.PROP_MNEMONIC_KEY.equals(name)) {
                setMnemonic(myPresentation.getMnemonic());
            }
            else if (Presentation.PROP_MNEMONIC_INDEX.equals(name)) {
                setDisplayedMnemonicIndex(myPresentation.getDisplayedMnemonicIndex());
            }
            else if (Presentation.PROP_TEXT.equals(name)) {
                setText(myPresentation.getText());
            }
            else if (Presentation.PROP_ICON.equals(name) || Presentation.PROP_DISABLED_ICON.equals(name)) {
                updateIcon();
            }
        }
    }

    // Keeps a submenu open while the mouse travels diagonally towards it:
    // intercepts mouse events and, if the pointer is moving inside the triangle
    // between its last position and the submenu's near edge, withholds the
    // event briefly (redispatching it later via the callback alarm).
    private static final class UsabilityHelper implements IdeEventQueue.EventDispatcher, AWTEventListener, Disposable {
        private Component myComponent;
        private Point myLastMousePoint;
        private Point myUpperTargetPoint;   // near corner of the submenu (top)
        private Point myLowerTargetPoint;   // near corner of the submenu (bottom)
        private SingleAlarm myCallbackAlarm;
        private MouseEvent myEventToRedispatch;
        private long myLastEventTime = 0L;
        private boolean myInBounds = false;
        private SingleAlarm myCheckAlarm;

        private UsabilityHelper(Component component) {
            // Fires once to redispatch the withheld mouse event, then disables itself.
            myCallbackAlarm = new SingleAlarm(() -> {
                Disposer.dispose(myCallbackAlarm);
                myCallbackAlarm = null;
                if (myEventToRedispatch != null) {
                    IdeEventQueue.getInstance().dispatchEvent(myEventToRedispatch);
                }
            }, 50, ModalityState.any(), this);
            // Watchdog: if the mouse has been idle for >1.5s outside the menu item,
            // release the withheld event.
            myCheckAlarm = new SingleAlarm(() -> {
                if (myLastEventTime > 0 && System.currentTimeMillis() - myLastEventTime > 1500) {
                    if (!myInBounds && myCallbackAlarm != null && !myCallbackAlarm.isDisposed()) {
                        myCallbackAlarm.request();
                    }
                }
                myCheckAlarm.request();
            }, 100, ModalityState.any(), this);
            myComponent = component;
            PointerInfo info = MouseInfo.getPointerInfo();
            myLastMousePoint = info != null ? info.getLocation() : null;
            if (myLastMousePoint != null) {
                Toolkit.getDefaultToolkit().addAWTEventListener(this, AWTEvent.COMPONENT_EVENT_MASK);
                IdeEventQueue.getInstance().addDispatcher(this, this);
            }
        }

        // Captures the submenu popup's near-edge corners once it becomes showing,
        // on whichever side faces the last known mouse position.
        @Override
        public void eventDispatched(AWTEvent event) {
            if (event instanceof ComponentEvent) {
                ComponentEvent componentEvent = (ComponentEvent)event;
                Component component = componentEvent.getComponent();
                JPopupMenu popup = ComponentUtil.getParentOfType((Class<? extends JPopupMenu>)JPopupMenu.class, component);
                if (popup != null && popup.getInvoker() == myComponent && popup.isShowing()) {
                    Rectangle bounds = popup.getBounds();
                    if (bounds.isEmpty()) return;
                    bounds.setLocation(popup.getLocationOnScreen());
                    if (myLastMousePoint.x < bounds.x) {
                        myUpperTargetPoint = new Point(bounds.x, bounds.y);
                        myLowerTargetPoint = new Point(bounds.x, bounds.y + bounds.height);
                    }
                    if (myLastMousePoint.x > bounds.x + bounds.width) {
                        myUpperTargetPoint = new Point(bounds.x + bounds.width, bounds.y);
                        myLowerTargetPoint = new Point(bounds.x + bounds.width, bounds.y + bounds.height);
                    }
                }
            }
        }

        // Returns true to swallow mouse-move events while the pointer is heading
        // towards the submenu; clicks are always passed through.
        @Override
        public boolean dispatch(@NotNull AWTEvent e) {
            if (e instanceof MouseEvent && myUpperTargetPoint != null && myLowerTargetPoint != null && myCallbackAlarm != null) {
                if (e.getID() == MouseEvent.MOUSE_PRESSED || e.getID() == MouseEvent.MOUSE_RELEASED || e.getID() == MouseEvent.MOUSE_CLICKED) {
                    return false;
                }
                Point point = ((MouseEvent)e).getLocationOnScreen();
                Rectangle bounds = myComponent.getBounds();
                bounds.setLocation(myComponent.getLocationOnScreen());
                myInBounds = bounds.contains(point);
                // Inside the menu item, or inside the triangle spanned by the last
                // mouse point and the submenu's near edge, counts as "moving towards".
                boolean isMouseMovingTowardsSubmenu = myInBounds || new Polygon(
                        new int[]{myLastMousePoint.x, myUpperTargetPoint.x, myLowerTargetPoint.x},
                        new int[]{myLastMousePoint.y, myUpperTargetPoint.y, myLowerTargetPoint.y},
                        3).contains(point);
                myEventToRedispatch = (MouseEvent)e;
                myLastEventTime = System.currentTimeMillis();
                if (!isMouseMovingTowardsSubmenu) {
                    myCallbackAlarm.request();
                } else {
                    myCallbackAlarm.cancel();
                }
                myLastMousePoint = point;
                return true;
            }
            return false;
        }

        @Override
        public void dispose() {
            myComponent = null;
            myEventToRedispatch = null;
            myLastMousePoint = myUpperTargetPoint = myLowerTargetPoint = null;
            Toolkit.getDefaultToolkit().removeAWTEventListener(this);
        }
    }
}
|
package com.intellij.openapi.wm.impl.status;
import com.intellij.ide.lightEdit.LightEdit;
import com.intellij.ide.lightEdit.LightEditService;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileEditor.impl.DockableEditorTabbedContainer;
import com.intellij.openapi.fileEditor.impl.EditorWithProviderComposite;
import com.intellij.openapi.fileEditor.impl.EditorsSplitters;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.ui.docking.DockContainer;
import com.intellij.ui.docking.DockManager;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* @author Kirill Likhodedov
*/
/**
 * Static helpers for locating the editor associated with a status bar and for
 * publishing informational messages to a project's status bar.
 *
 * @author Kirill Likhodedov
 */
public final class StatusBarUtil {
  private static final Logger LOG = Logger.getInstance(StatusBar.class);

  private StatusBarUtil() { }

  /**
   * Returns the text editor currently selected in the window this status bar
   * belongs to, or {@code null} when there is none or its file is invalid.
   */
  @Nullable
  public static Editor getCurrentTextEditor(@Nullable StatusBar statusBar) {
    if (statusBar == null) {
      return null;
    }
    FileEditor fileEditor = getCurrentFileEditor(statusBar);
    if (!(fileEditor instanceof TextEditor)) {
      return null;
    }
    Editor editor = ((TextEditor)fileEditor).getEditor();
    return ensureValidEditorFile(editor, fileEditor) ? editor : null;
  }

  /**
   * Finds the file editor currently selected in the docked editor container
   * the given status bar is attached to.
   */
  @Nullable
  public static FileEditor getCurrentFileEditor(@Nullable StatusBar statusBar) {
    if (statusBar == null) {
      return null;
    }
    Project project = statusBar.getProject();
    if (project == null) {
      return null;
    }
    // LightEdit mode maintains its own selected editor.
    if (LightEdit.owns(project)) {
      return LightEditService.getInstance().getSelectedFileEditor();
    }
    DockContainer container = DockManager.getInstance(project).getContainerFor(statusBar.getComponent());
    if (!(container instanceof DockableEditorTabbedContainer)) {
      return null;
    }
    EditorsSplitters splitters = ((DockableEditorTabbedContainer)container).getSplitters();
    if (splitters == null || splitters.getCurrentWindow() == null) {
      return null;
    }
    EditorWithProviderComposite composite = splitters.getCurrentWindow().getSelectedEditor();
    return composite == null ? null : composite.getSelectedWithProvider().getFileEditor();
  }

  /** Shows {@code message} in the given project's status bar, if it has one. */
  public static void setStatusBarInfo(@NotNull Project project, @NotNull @NlsContexts.StatusBarText String message) {
    StatusBar bar = WindowManager.getInstance().getStatusBar(project);
    if (bar != null) {
      bar.setInfo(message);
    }
  }

  /**
   * Returns {@code true} when the editor's file is still valid (or unknown);
   * otherwise logs a diagnostic error and returns {@code false}.
   */
  private static boolean ensureValidEditorFile(@NotNull Editor editor, @Nullable FileEditor fileEditor) {
    Document document = editor.getDocument();
    VirtualFile file = FileDocumentManager.getInstance().getFile(document);
    if (file == null || file.isValid()) {
      return true;
    }
    Document cachedDocument = FileDocumentManager.getInstance().getCachedDocument(file);
    Project project = editor.getProject();
    Boolean fileIsOpen = project == null ? null : ArrayUtil.contains(file, FileEditorManager.getInstance(project).getOpenFiles());
    LOG.error("Returned editor for invalid file: " + editor +
              "; disposed=" + editor.isDisposed() +
              (fileEditor == null ? "" : "; fileEditor=" + fileEditor + "; fileEditor.valid=" + fileEditor.isValid()) +
              "; file " + file.getClass() +
              "; cached document exists: " + (cachedDocument != null) +
              "; same as document: " + (cachedDocument == document) +
              "; file is open: " + fileIsOpen);
    return false;
  }
}
|
package org.motechproject.server.osgi;
import org.motechproject.server.config.monitor.ConfigFileMonitor;
import org.motechproject.server.startup.StartupManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
/**
 * Servlet context listener that boots the embedded OSGi framework and the
 * MOTECH platform when the web application starts, and stops the framework
 * when the application is shut down.
 */
public class OsgiListener implements ServletContextListener {
  private static final Logger LOGGER = LoggerFactory.getLogger(OsgiListener.class);

  /** Symbolic name of the Admin UI bundle, launched alone when full startup fails. */
  public static final String ADMIN_BUNDLE = "org.motechproject.motech-admin-bundle";

  // Lazily resolved from the Spring web application context; shared so that
  // getOsgiService() can expose it statically.
  private static OsgiFrameworkService service;

  private StartupManager startupManager = StartupManager.getInstance();
  private ConfigFileMonitor configFileMonitor;

  @Override
  public void contextInitialized(ServletContextEvent servletContextEvent) {
    LOGGER.debug("Starting OSGi framework...");
    getOsgiService(servletContextEvent).start();
    LOGGER.debug("Starting MoTeCH...");
    startupManager.startup();
    if (!startupManager.canLaunchBundles()) {
      // Startup had problems: bring up only the Admin UI so the user can fix things.
      LOGGER.warn("Problems with MoTeCH launch. Finding and launching Admin UI bundle to repair errors by user...");
      boolean adminStarted = getOsgiService().startBundle(ADMIN_BUNDLE);
      if (!adminStarted) {
        LOGGER.error("Admin UI bundle not found. Shutting down MOTECH platform...");
        getOsgiService().stop();
      }
      return;
    }
    LOGGER.info("Monitoring config file...");
    getConfigFileMonitor(servletContextEvent).monitor();
    LOGGER.info("Launching MOTECH bundles...");
    getOsgiService().startMotechBundles();
  }

  @Override
  public void contextDestroyed(ServletContextEvent servletContextEvent) {
    getOsgiService(servletContextEvent).stop();
  }

  /** Resolves (once) the {@link OsgiFrameworkService} bean from the web app context. */
  private OsgiFrameworkService getOsgiService(ServletContextEvent servletContextEvent) {
    if (service != null) {
      return service;
    }
    LOGGER.debug("Finding OsgiService instance in context...");
    ServletContext servletContext = servletContextEvent.getServletContext();
    ApplicationContext applicationContext = WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
    service = applicationContext.getBean(OsgiFrameworkService.class);
    return service;
  }

  /** Resolves (once) the {@link ConfigFileMonitor} bean from the web app context. */
  private ConfigFileMonitor getConfigFileMonitor(ServletContextEvent servletContextEvent) {
    if (configFileMonitor != null) {
      return configFileMonitor;
    }
    LOGGER.debug("Finding ConfigFileMonitor instance in context...");
    ServletContext servletContext = servletContextEvent.getServletContext();
    ApplicationContext applicationContext = WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
    configFileMonitor = applicationContext.getBean(ConfigFileMonitor.class);
    return configFileMonitor;
  }

  public static OsgiFrameworkService getOsgiService() {
    return service;
  }
}
|
package com.intellij.execution.junit;
import com.intellij.execution.actions.ConfigurationContext;
import com.intellij.execution.actions.ConfigurationFromContext;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.testframework.AbstractTestProxy;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.PsiElement;
import com.intellij.psi.search.GlobalSearchScope;
import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
import java.util.Objects;
/**
 * Produces a JUnit 5 run configuration that re-runs the exact test nodes
 * (identified by their unique IDs) selected in a test tree.
 */
public class UniqueIdConfigurationProducer extends JUnitConfigurationProducer {
  @Override
  protected boolean setupConfigurationFromContext(@NotNull JUnitConfiguration configuration,
                                                  @NotNull ConfigurationContext context,
                                                  @NotNull Ref<PsiElement> sourceElement) {
    String[] uniqueIds = getNodeIds(context);
    if (uniqueIds == null || uniqueIds.length == 0) {
      return false;
    }
    JUnitConfiguration.Data data = configuration.getPersistentData();
    data.setUniqueIds(uniqueIds);
    data.TEST_OBJECT = JUnitConfiguration.TEST_UNIQUE_ID;
    // Name the configuration after the selected proxy when one is available.
    AbstractTestProxy proxy = context.getDataContext().getData(AbstractTestProxy.DATA_KEY);
    if (proxy == null) {
      configuration.setGeneratedName();
    }
    else {
      configuration.setName(proxy.getName());
    }
    setupConfigurationModule(context, configuration);
    return true;
  }

  /**
   * Collects the effective unique node IDs of the selected test proxies, or
   * {@code null} when the context is not a JUnit 5 run from a JUnit configuration.
   */
  public static String[] getNodeIds(ConfigurationContext context) {
    DataContext dataContext = context.getDataContext();
    AbstractTestProxy[] proxies = dataContext.getData(AbstractTestProxy.DATA_KEYS);
    if (proxies == null) {
      return null;
    }
    RunConfiguration runConfiguration = dataContext.getData(RunConfiguration.DATA_KEY);
    if (!(runConfiguration instanceof JUnitConfiguration)) {
      return null;
    }
    Module module = ((JUnitConfiguration)runConfiguration).getConfigurationModule().getModule();
    Project project = context.getProject();
    GlobalSearchScope searchScope = module == null
                                    ? GlobalSearchScope.projectScope(project)
                                    : GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(module);
    if (!JUnitUtil.isJUnit5(searchScope, project)) {
      return null;
    }
    return Arrays.stream(proxies)
      .map(proxy -> TestUniqueId.getEffectiveNodeId(proxy, project, searchScope))
      .filter(Objects::nonNull)
      .toArray(String[]::new);
  }

  @Override
  protected boolean isApplicableTestType(String type, ConfigurationContext context) {
    return JUnitConfiguration.TEST_UNIQUE_ID.equals(type);
  }

  // A unique-id configuration is preferred over class- or pattern-based producers.
  @Override
  public boolean shouldReplace(@NotNull ConfigurationFromContext self, @NotNull ConfigurationFromContext other) {
    if (!self.isProducedBy(UniqueIdConfigurationProducer.class)) {
      return false;
    }
    return other.isProducedBy(TestInClassConfigurationProducer.class)
           || other.isProducedBy(PatternConfigurationProducer.class);
  }
}
|
package org.muml.eloquent.ocl.vql;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import org.eclipse.ocl.pivot.Operation;
import org.eclipse.ocl.pivot.OperationCallExp;
import org.eclipse.ocl.pivot.SetType;
import org.eclipse.ocl.pivot.TupleType;
import org.eclipse.ocl.pivot.TypedElement;
import org.eclipse.ocl.pivot.evaluation.Executor;
import org.eclipse.ocl.pivot.ids.TuplePartId;
import org.eclipse.ocl.pivot.ids.TupleTypeId;
import org.eclipse.ocl.pivot.internal.utilities.EnvironmentFactoryInternal;
import org.eclipse.ocl.pivot.library.AbstractOperation;
import org.eclipse.ocl.pivot.utilities.ClassUtil;
import org.eclipse.ocl.pivot.values.InvalidValueException;
import org.eclipse.ocl.pivot.values.SetValue;
import org.eclipse.ocl.pivot.values.TupleValue;
import org.eclipse.viatra.query.runtime.api.IPatternMatch;
import org.eclipse.viatra.query.runtime.api.IQuerySpecification;
import org.eclipse.viatra.query.runtime.api.ViatraQueryEngine;
import org.eclipse.viatra.query.runtime.api.ViatraQueryMatcher;
import org.eclipse.viatra.query.runtime.emf.EMFScope;
import org.eclipse.viatra.query.runtime.exception.ViatraQueryException;
public class VQLOperation extends AbstractOperation {
private static final String unexpectedNumberOfArguments =
"Expected at least two arguments (got: %d)";
private static final String invalidContextObject =
"contextObject has to be a non-null EObject (got: {0})";
private String matcherClassFQN;
public VQLOperation(@NonNull String matcherClassFQN) {
this.matcherClassFQN = matcherClassFQN;
}
@Override
public @Nullable Object dispatch(@NonNull Executor executor,
@NonNull OperationCallExp callExp,
@Nullable Object sourceValue) {
throw new UnsupportedOperationException();
}
@Override
public @Nullable Object basicEvaluate(@NonNull Executor executor,
@NonNull TypedElement caller,
@Nullable Object @NonNull [] sourceAndArgumentValues) {
if (sourceAndArgumentValues.length < 2) {
// should never happen...
throw new IllegalStateException(
String.format(unexpectedNumberOfArguments,
sourceAndArgumentValues.length));
}
Object contextObject = sourceAndArgumentValues[1];
if (contextObject == null || !(contextObject instanceof EObject)) {
// cannot do much without a context object
throw new InvalidValueException(invalidContextObject,
contextObject);
}
int numArgs = sourceAndArgumentValues.length - 2;
Object[] args = new Object[numArgs];
System.arraycopy(sourceAndArgumentValues, 2, args, 0, numArgs);
Collection<? extends IPatternMatch> matchResults = performMatch((EObject) contextObject,
executor, args);
Operation operation = ((OperationCallExp) caller)
.getReferredOperation();
return createResultSet(operation, matchResults);
}
protected @NonNull IQuerySpecification<ViatraQueryMatcher<IPatternMatch>> loadQuerySpecification(@NonNull Executor executor) {
List<ClassLoader> classLoaders = ((EnvironmentFactoryInternal) executor.getEnvironmentFactory())
.getMetamodelManager().getImplementationManager().getClassLoaders();
Class<?> clazz = null;
for (ClassLoader classLoader : classLoaders) {
try {
clazz = classLoader.loadClass(matcherClassFQN);
} catch (ClassNotFoundException e) {}
}
if (clazz == null) {
throw new IllegalStateException("unable to get query");
}
Object querySpecification = null;
try {
Method method = clazz.getMethod("querySpecification");
querySpecification = method.invoke(null);
} catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
throw new RuntimeException(e);
}
if (querySpecification == null) {
throw new IllegalStateException("querySpecification is null");
}
// using the IQuerySpecification<?> will not work, because it is not
// possible to call matcher.getAllMatches(partialMatch) later
@SuppressWarnings("unchecked")
IQuerySpecification<ViatraQueryMatcher<IPatternMatch>> cast =
(IQuerySpecification<ViatraQueryMatcher<IPatternMatch>>) querySpecification;
return cast;
}
protected @NonNull Collection<@Nullable ? extends IPatternMatch> performMatch(@NonNull EObject contextObject,
@NonNull Executor executor,
@Nullable Object @NonNull [] sourceAndArgumentValues) {
IQuerySpecification<? extends ViatraQueryMatcher<IPatternMatch>> querySpecification = loadQuerySpecification(executor);
ViatraQueryMatcher<IPatternMatch> matcher;
try {
ViatraQueryEngine engine = ViatraQueryEngine.on(new EMFScope(contextObject));
matcher = engine.getMatcher(querySpecification);
} catch (ViatraQueryException e) {
throw new RuntimeException(e);
}
Collection<IPatternMatch> matchResults;
if (sourceAndArgumentValues.length > 0) {
IPatternMatch partialMatch = matcher.newMatch(sourceAndArgumentValues);
matchResults = matcher.getAllMatches(partialMatch);
} else {
matchResults = matcher.getAllMatches();
}
return ClassUtil.nonNull(matchResults);
}
protected @NonNull SetValue createResultSet(@NonNull Operation operation,
@NonNull Collection<@Nullable ? extends IPatternMatch> matchResults) {
Set<@NonNull TupleValue> resultSet = new HashSet<TupleValue>(matchResults.size());
SetType setType = (SetType) operation.getType();
TupleTypeId tupleTypeId = ((TupleType) setType.getElementType())
.getTupleTypeId();
for (IPatternMatch match : matchResults) {
Map<@NonNull TuplePartId, @Nullable Object> partToValueMap = new HashMap<TuplePartId, Object>();
for (TuplePartId partId : tupleTypeId.getPartIds()) {
String partName = partId.getName();
Object value = ClassUtil.nonNull(match).get(partName);
partToValueMap.put(partId, value);
}
TupleValue tupleValue = createTupleValue(tupleTypeId,
partToValueMap);
resultSet.add(tupleValue);
}
return createSetValue(setType.getTypeId(), resultSet);
}
}
|
package org.languagetool.rules.en;
import java.util.ResourceBundle;
import org.languagetool.AnalyzedTokenReadings;
import org.languagetool.Language;
import org.languagetool.rules.Example;
import org.languagetool.rules.WordRepeatRule;
/**
* Word repeat rule for English, to avoid false alarms in the generic word repetition rule.
*/
/**
 * Word repeat rule for English, to avoid false alarms in the generic word repetition rule.
 */
public class EnglishWordRepeatRule extends WordRepeatRule {

  public EnglishWordRepeatRule(ResourceBundle messages, Language language) {
    super(messages, language);
    addExamplePair(Example.wrong("This <marker>is is</marker> just an example sentence."),
            Example.fixed("This <marker>is</marker> just an example sentence."));
  }

  @Override
  public String getId() {
    return "ENGLISH_WORD_REPEAT_RULE";
  }

  /**
   * Returns {@code true} when the repetition at {@code position} is legitimate
   * English — grammatical doublings ("had had", "that that"), reduplicated
   * names ("Pago Pago", "Duran Duran"), or capitalisation pairs ("may May") —
   * so the generic repetition rule must not flag it.
   */
  @Override
  public boolean ignore(AnalyzedTokenReadings[] tokens, int position) {
    if (position == 0) {
      // From here on position >= 1, so tokens[position - 1] is always in range.
      return false;
    }
    if (wordRepetitionOf("had", tokens, position) && posIsIn(tokens, position - 2, "PRP")) {
      return true; // "If I had had time, I would have gone to see him."
    }
    if (wordRepetitionOf("that", tokens, position) && posIsIn(tokens, position + 1, "NN", "PRP$", "JJ", "VBZ", "VBD")) {
      return true; // "I don't think that that is a problem."
    }
    if (wordRepetitionOf("can", tokens, position) && posIsIn(tokens, position - 1, "NN")) {
      return true; // "The can can hold the water."
    }
    if (wordRepetitionOf("blah", tokens, position)) {
      return true; // "blah blah"
    }
    if (wordRepetitionOf("yadda", tokens, position)) {
      return true; // "yadda yadda"
    }
    if (wordRepetitionOf("Pago", tokens, position)) {
      return true; // "Pago Pago"
    }
    if (wordRepetitionOf("Wagga", tokens, position)) {
      return true; // "Wagga Wagga"
    }
    if (wordRepetitionOf("Duran", tokens, position)) {
      return true; // "Duran Duran"
    }
    if (wordRepetitionOf("sapiens", tokens, position)) {
      return true; // "Homo sapiens sapiens"
    }
    if (wordRepetitionOf("tse", tokens, position)) {
      return true; // "tse tse"
    }
    if (wordRepetitionOf("Li", tokens, position)) {
      return true; // "Li Li", Chinese name
    }
    if (tokens[position].getToken().endsWith("ay")) {
      if (tokens[position - 1].getToken().equals("may") && tokens[position].getToken().equals("May")) {
        return true; // "may May"
      }
      if (tokens[position - 1].getToken().equals("May") && tokens[position].getToken().equals("may")) {
        return true; // "May may"
      }
      // Fix: guard the fixed-index access — tokens[2] only exists when the
      // sentence has at least three tokens (avoids ArrayIndexOutOfBoundsException
      // on very short sentences).
      if (tokens.length > 2 && tokens[1].getToken().equals("May") && tokens[2].getToken().equals("May")) {
        return true; // "May May" SENT_START
      }
    }
    if (tokens[position].getToken().endsWith("ill")) {
      // Note: the former `position > 0 &&` here was redundant — position == 0
      // already returned above — and has been removed for consistency.
      if (tokens[position - 1].getToken().equals("will") && tokens[position].getToken().equals("Will")) {
        return true; // "will Will"
      }
      if (tokens[position - 1].getToken().equals("Will") && tokens[position].getToken().equals("will")) {
        return true; // "Will will"
      }
      // Fix: same bounds guard as for "May May" above.
      if (tokens.length > 2 && tokens[1].getToken().equals("Will") && tokens[2].getToken().equals("Will")) {
        return true; // "Will Will" SENT_START
      }
    }
    return false;
  }

  /**
   * Returns {@code true} if the token at {@code position} (when in range) has a
   * reading whose POS tag partially matches one of {@code posTags}.
   */
  private boolean posIsIn(AnalyzedTokenReadings[] tokens, int position, String... posTags) {
    if (position >= 0 && position < tokens.length) {
      for (String posTag : posTags) {
        if (tokens[position].hasPartialPosTag(posTag)) {
          return true;
        }
      }
    }
    return false;
  }

  /** Returns {@code true} if the tokens at position-1 and position both equal {@code word} (case-sensitive). */
  private boolean wordRepetitionOf(String word, AnalyzedTokenReadings[] tokens, int position) {
    return tokens[position - 1].getToken().equals(word) && tokens[position].getToken().equals(word);
  }
}
|
package com.opengamma.engine.function.resolver;
import java.util.Set;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CompiledFunctionDefinition;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.ParameterizedFunction;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.PublicAPI;
/**
 * Advertises a function to a {@link CompiledFunctionResolver}.
 * <p>
 * A rule couples a parameterized function with a target filter and a priority;
 * the resolver consults these to decide which function should satisfy a
 * requested output on a given computation target.
 */
@PublicAPI
public class ResolutionRule {

  /** The function being advertised, never null. */
  private final ParameterizedFunction _parameterizedFunction;
  /** Restricts the computation targets this rule may be applied to, never null. */
  private final ComputationTargetFilter _computationTargetFilter;
  /** Relative priority; when several rules can produce an output, the highest wins. */
  private final int _priority;

  /**
   * Creates an instance.
   *
   * @param function the function, not null
   * @param computationTargetFilter the filter, not null
   * @param priority the priority
   */
  public ResolutionRule(ParameterizedFunction function, ComputationTargetFilter computationTargetFilter, int priority) {
    ArgumentChecker.notNull(function, "function");
    ArgumentChecker.notNull(computationTargetFilter, "computationTargetFilter");
    _parameterizedFunction = function;
    _computationTargetFilter = computationTargetFilter;
    _priority = priority;
  }

  /**
   * Gets the parameterized function.
   *
   * @return the function this rule is advertising, not null
   */
  public ParameterizedFunction getFunction() {
    return _parameterizedFunction;
  }

  /**
   * Gets the filter that the rule uses.
   *
   * @return the filter in use, not null
   */
  public ComputationTargetFilter getComputationTargetFilter() {
    return _computationTargetFilter;
  }

  /**
   * Gets the priority of the rule. If multiple rules can produce a given
   * output, the one with the highest priority is chosen.
   *
   * @return the priority
   */
  public int getPriority() {
    return _priority;
  }

  /**
   * Determines whether the advertised function can validly produce the desired
   * output for the given target. This succeeds only when the function can
   * produce the output and the rule's filter accepts the target.
   * <p>
   * The work is split across {@link #getResults} and
   * {@link #getResult(ValueRequirement, ComputationTarget, Set)} so a resolver
   * can cache the intermediate result set rather than recomputing it per call.
   *
   * @param output the output the function should produce
   * @param target the computation target
   * @param context the function compilation context
   * @return the matching specification as returned by the function (not composed
   *         against the requirement constraints), or null if the rule cannot
   *         produce the desired output
   */
  public ValueSpecification getResult(ValueRequirement output, ComputationTarget target, FunctionCompilationContext context) {
    final Set<ValueSpecification> resultSpecs = getResults(target, context);
    return resultSpecs == null ? null : getResult(output, target, resultSpecs);
  }

  /**
   * First half of {@link #getResult(ValueRequirement, ComputationTarget, FunctionCompilationContext)}:
   * computes the maximal set of outputs the function can produce for the target.
   *
   * @param target the computation target
   * @param context the function compilation context
   * @return all value specifications the function can produce, or null if it
   *         cannot apply to the target
   */
  public Set<ValueSpecification> getResults(final ComputationTarget target, final FunctionCompilationContext context) {
    final CompiledFunctionDefinition function = _parameterizedFunction.getFunction();
    // The function must be applicable to the target before asking for results.
    if (!function.canApplyTo(context, target)) {
      return null;
    }
    return function.getResults(context, target);
  }

  /**
   * Second half of {@link #getResult(ValueRequirement, ComputationTarget, FunctionCompilationContext)}:
   * selects from a previously computed result set.
   *
   * @param output the output the function should produce
   * @param target the computation target
   * @param resultSpecs the results from {@code getResults()}, not null
   * @return the matching specification as returned by the function (not composed
   *         against the requirement constraints), or null if the rule cannot
   *         produce the desired output
   */
  public ValueSpecification getResult(final ValueRequirement output, final ComputationTarget target, final Set<ValueSpecification> resultSpecs) {
    for (final ValueSpecification resultSpec : resultSpecs) {
      if (!output.isSatisfiedBy(resultSpec)) {
        continue;
      }
      // A satisfying specification was found. The target filter is applied last
      // because filters rarely exclude compared to the satisfaction test.
      return _computationTargetFilter.accept(target) ? resultSpec : null;
    }
    return null;
  }

  @Override
  public String toString() {
    return "ResolutionRule[" + getFunction() + " at priority " + getPriority() + "]";
  }
}
|
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// all copies or substantial portions of the Software.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
// File created: 2011-06-23 13:22:53
package fi.tkk.ics.hadoop.bam.cli.plugins;
import java.io.File;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ChecksumFileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import net.sf.picard.sam.ReservedTagConstants;
import net.sf.samtools.util.BlockCompressedStreamConstants;
import fi.tkk.ics.hadoop.bam.custom.hadoop.InputSampler;
import fi.tkk.ics.hadoop.bam.custom.hadoop.TotalOrderPartitioner;
import fi.tkk.ics.hadoop.bam.custom.jargs.gnu.CmdLineParser;
import fi.tkk.ics.hadoop.bam.custom.samtools.BAMFileWriter;
import fi.tkk.ics.hadoop.bam.custom.samtools.SamFileHeaderMerger;
import fi.tkk.ics.hadoop.bam.custom.samtools.SAMFileHeader;
import fi.tkk.ics.hadoop.bam.custom.samtools.SAMFileReader;
import fi.tkk.ics.hadoop.bam.custom.samtools.SAMFileWriterImpl;
import fi.tkk.ics.hadoop.bam.custom.samtools.SAMTextWriter;
import fi.tkk.ics.hadoop.bam.custom.samtools.SAMRecord;
import static fi.tkk.ics.hadoop.bam.custom.jargs.gnu.CmdLineParser.Option.*;
import fi.tkk.ics.hadoop.bam.AnySAMInputFormat;
import fi.tkk.ics.hadoop.bam.AnySAMOutputFormat;
import fi.tkk.ics.hadoop.bam.KeyIgnoringAnySAMOutputFormat;
import fi.tkk.ics.hadoop.bam.SAMFormat;
import fi.tkk.ics.hadoop.bam.SAMRecordWritable;
import fi.tkk.ics.hadoop.bam.cli.CLIPlugin;
import fi.tkk.ics.hadoop.bam.cli.Utils;
import fi.tkk.ics.hadoop.bam.util.Pair;
import fi.tkk.ics.hadoop.bam.util.Timer;
public final class Sort extends CLIPlugin {
// Help text for each command-line option; populated by the static initializer.
private static final List<Pair<CmdLineParser.Option, String>> optionDescs
= new ArrayList<Pair<CmdLineParser.Option, String>>();
// Command-line options recognised by the sort plugin.
private static final CmdLineParser.Option
verboseOpt = new BooleanOption('v', "verbose"),
outputFileOpt = new StringOption('o', "output-file=PATH"),
samOutputOpt = new BooleanOption('s', "output-sam"),
noTrustExtsOpt = new BooleanOption("no-trust-exts");
// Registers the plugin under the command name "sort" with its version, usage
// line ("WORKDIR INPATH [INPATH...]"), option descriptions, and long help text;
// the actual work is done in run().
public Sort() {
super("sort", "BAM and SAM sorting and merging", "3.0",
"WORKDIR INPATH [INPATH...]",
optionDescs,
"Merges together the BAM and SAM files in the INPATHs, sorting the "+
"result, in a distributed fashion using Hadoop. Output parts are "+
"placed in WORKDIR in, by default, headerless BAM format.");
}
// Fills in the per-option help descriptions once, at class load time.
static {
optionDescs.add(new Pair<CmdLineParser.Option, String>(
verboseOpt, "tell the Hadoop job to be more verbose"));
optionDescs.add(new Pair<CmdLineParser.Option, String>(
outputFileOpt, "output a complete SAM/BAM file to the file PATH, "+
"removing the parts from WORKDIR; SAM/BAM is chosen "+
"by file extension, if appropriate (overrides -s)"));
optionDescs.add(new Pair<CmdLineParser.Option, String>(
noTrustExtsOpt, "detect SAM/BAM files only by contents, "+
"never by file extension"));
optionDescs.add(new Pair<CmdLineParser.Option, String>(
samOutputOpt, "output SAM instead of BAM"));
}
	/**
	 * Runs the distributed sort.
	 *
	 * The remaining command-line arguments are WORKDIR followed by one or
	 * more INPATHs.  A total-order-partitioned MapReduce job sorts the
	 * records into WORKDIR; if -o was given, the per-reducer outputs are
	 * afterwards merged into that single SAM/BAM file.
	 *
	 * @return 0 on success, 3 on a usage error, 4 if sampling or the job
	 *         fails, 5 if the final output merge fails
	 */
	@Override protected int run(CmdLineParser parser) {
		final List<String> args = parser.getRemainingArgs();
		if (args.isEmpty()) {
			System.err.println("sort :: WORKDIR not given.");
			return 3;
		}
		if (args.size() == 1) {
			System.err.println("sort :: INPATH not given.");
			return 3;
		}
		final String wrkDir = args.get(0),
		             out = (String)parser.getOptionValue(outputFileOpt);
		final List<String> strInputs = args.subList(1, args.size());
		final List<Path> inputs = new ArrayList<Path>(strInputs.size());
		for (final String in : strInputs)
			inputs.add(new Path(in));
		final boolean verbose = parser.getBoolean(verboseOpt);
		// Intermediate parts are named after the final output file, or after
		// the first input when no output was requested.
		final String intermediateOutName =
			(out == null ? inputs.get(0) : new Path(out)).getName();
		final Configuration conf = getConf();
		// Output format selection: the -o file's extension wins, then the
		// explicit SAM option, defaulting to BAM.
		SAMFormat format = null;
		if (out != null)
			format = SAMFormat.inferFromFilePath(out);
		if (format == null)
			format = parser.getBoolean(samOutputOpt)
				? SAMFormat.SAM : SAMFormat.BAM;
		conf.set(AnySAMOutputFormat.OUTPUT_SAM_FORMAT_PROPERTY,
			format.toString());
		conf.setBoolean(AnySAMInputFormat.TRUST_EXTS_PROPERTY,
			!parser.getBoolean(noTrustExtsOpt));
		// Used by getHeaderMerger. SortRecordReader needs it to correct the
		// reference indices when the output has a different index and
		// SortOutputFormat needs it to have the correct header for the output
		// records.
		conf.setStrings(INPUT_PATHS_PROP, strInputs.toArray(new String[0]));
		// Used by SortOutputFormat to name the output files.
		conf.set(SortOutputFormat.OUTPUT_NAME_PROP, intermediateOutName);
		final Path wrkDirPath = new Path(wrkDir);
		final Timer t = new Timer();
		try {
			for (final Path in : inputs)
				Utils.configureSampling(in, conf);
			// As far as I can tell there's no non-deprecated way of getting this
			// info. We can silence this warning but not the import.
			@SuppressWarnings("deprecation")
			final int maxReduceTasks =
				new JobClient(new JobConf(conf)).getClusterStatus()
					.getMaxReduceTasks();
			// Use ~90% of the cluster's reduce capacity, but always at least 1.
			conf.setInt("mapred.reduce.tasks", Math.max(1, maxReduceTasks*9/10));
			final Job job = new Job(conf);
			job.setJarByClass (Sort.class);
			job.setMapperClass (Mapper.class);
			job.setReducerClass(SortReducer.class);
			job.setMapOutputKeyClass(LongWritable.class);
			job.setOutputKeyClass (NullWritable.class);
			job.setOutputValueClass (SAMRecordWritable.class);
			job.setInputFormatClass (SortInputFormat.class);
			job.setOutputFormatClass(SortOutputFormat.class);
			for (final Path in : inputs)
				FileInputFormat.addInputPath(job, in);
			FileOutputFormat.setOutputPath(job, wrkDirPath);
			job.setPartitionerClass(TotalOrderPartitioner.class);
			System.out.println("sort :: Sampling...");
			t.start();
			// Sample the inputs (1% frequency, up to 100 samples per split) to
			// build the total-order partition file.
			InputSampler.<LongWritable,SAMRecordWritable>writePartitionFile(
				job,
				new InputSampler.IntervalSampler<LongWritable,SAMRecordWritable>(
					0.01, 100));
			System.out.printf("sort :: Sampling complete in %d.%03d s.\n",
				t.stopS(), t.fms());
			job.submit();
			System.out.println("sort :: Waiting for job completion...");
			t.start();
			if (!job.waitForCompletion(verbose)) {
				System.err.println("sort :: Job failed.");
				return 4;
			}
			System.out.printf("sort :: Job complete in %d.%03d s.\n",
				t.stopS(), t.fms());
		} catch (IOException e) {
			System.err.printf("sort :: Hadoop error: %s\n", e);
			return 4;
		} catch (ClassNotFoundException e) { throw new RuntimeException(e); }
		catch (InterruptedException e) { throw new RuntimeException(e); }
		if (out != null) try {
			System.out.println("sort :: Merging output...");
			t.start();
			final Path outPath = new Path(out);
			final FileSystem srcFS = wrkDirPath.getFileSystem(conf);
			FileSystem dstFS = outPath.getFileSystem(conf);
			// The checksummed local file system doesn't support append().
			if (dstFS instanceof LocalFileSystem
				&& dstFS instanceof ChecksumFileSystem)
				dstFS = ((LocalFileSystem)dstFS).getRaw();
			// First, place the BAM header.
			final SAMFileWriterImpl w;
			switch (format) {
				case BAM:
					w = new BAMFileWriter(dstFS.create(outPath), new File(""));
					break;
				case SAM:
					w = new SAMTextWriter(dstFS.create(outPath));
					break;
				// Unreachable: format is always BAM or SAM by this point.
				default: assert false; w = null;
			}
			w.setSortOrder(SAMFileHeader.SortOrder.coordinate, true);
			w.setHeader(getHeaderMerger(conf).getMergedHeader());
			w.close();
			// Then, the actual SAM or BAM contents.
			final OutputStream outs = dstFS.append(outPath);
			// Parts are named NAME-NNNNNN* by SortOutputFormat.getDefaultWorkFile.
			final FileStatus[] parts = srcFS.globStatus(new Path(
				wrkDir, conf.get(SortOutputFormat.OUTPUT_NAME_PROP) +
				"-[0-9][0-9][0-9][0-9][0-9][0-9]*"));
			{int i = 0;
			final Timer t2 = new Timer();
			for (final FileStatus part : parts) {
				t2.start();
				final InputStream ins = srcFS.open(part.getPath());
				IOUtils.copyBytes(ins, outs, conf, false);
				ins.close();
				System.out.printf("sort :: Merged part %d in %d.%03d s.\n",
					++i, t2.stopS(), t2.fms());
			}}
			for (final FileStatus part : parts)
				srcFS.delete(part.getPath(), false);
			// And if BAM, the BGZF terminator.
			if (format == SAMFormat.BAM)
				outs.write(BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK);
			outs.close();
			System.out.printf("sort :: Merging complete in %d.%03d s.\n",
				t.stopS(), t.fms());
		} catch (IOException e) {
			System.err.printf("sort :: Output merging failed: %s\n", e);
			return 5;
		}
		return 0;
	}
	// Comma-separated list of the job's input paths, set by run() and read
	// back by getHeaderMerger() in every task.
	private static final String INPUT_PATHS_PROP = "hadoopbam.sort.input.paths";
	// Per-JVM cache for the merged header (see getHeaderMerger below).
	private static SamFileHeaderMerger headerMerger = null;
	/**
	 * Returns a SamFileHeaderMerger over the headers of all the input files
	 * named by INPUT_PATHS_PROP, building and caching it on first use.
	 *
	 * NOTE(review): the static cache is not synchronized — presumably safe
	 * because each Hadoop task runs this single-threaded in its own JVM;
	 * confirm before calling from multithreaded code.
	 *
	 * @throws IOException if any input file's header cannot be read
	 */
	public static SamFileHeaderMerger getHeaderMerger(Configuration conf)
		throws IOException
	{
		// TODO: it would be preferable to cache this beforehand instead of
		// having every task read the header block of every input file. But that
		// would be trickier, given that SamFileHeaderMerger isn't trivially
		// serializable.
		// Save it in a static field, though, in case that helps anything.
		if (headerMerger != null)
			return headerMerger;
		final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
		// Open each input just long enough to pull out its header.
		for (final String in : conf.getStrings(INPUT_PATHS_PROP)) {
			final Path p = new Path(in);
			final SAMFileReader r =
				new SAMFileReader(p.getFileSystem(conf).open(p));
			headers.add(r.getFileHeader());
			r.close();
		}
		return headerMerger = new SamFileHeaderMerger(
			SAMFileHeader.SortOrder.coordinate, headers, true);
	}
}
// Identity reducer for the sort: the shuffle has already ordered the
// records by their (reference index, position) key, so all that remains is
// to strip the key and emit each record unchanged.
final class SortReducer
	extends Reducer<LongWritable,SAMRecordWritable,
	                NullWritable,SAMRecordWritable>
{
	@Override protected void reduce(
			LongWritable ignored, Iterable<SAMRecordWritable> records,
			Reducer<LongWritable,SAMRecordWritable,
			        NullWritable,SAMRecordWritable>.Context
				ctx)
		throws IOException, InterruptedException
	{
		// Forward every record under the null key; iteration order is kept.
		for (final SAMRecordWritable record : records) {
			ctx.write(NullWritable.get(), record);
		}
	}
}
// Because we want a total order and we may change the key when merging
// headers, we can't use a mapper here: the InputSampler reads directly from
// the InputFormat.
final class SortInputFormat
	extends FileInputFormat<LongWritable,SAMRecordWritable>
{
	// Lazily-created delegate that does the actual SAM/BAM reading.
	private AnySAMInputFormat delegate = null;

	// Creates the delegate on first use and returns it thereafter.
	private AnySAMInputFormat delegate(final Configuration conf) {
		if (delegate == null) {
			delegate = new AnySAMInputFormat(conf);
		}
		return delegate;
	}

	@Override public RecordReader<LongWritable,SAMRecordWritable>
		createRecordReader(InputSplit split, TaskAttemptContext ctx)
		throws InterruptedException, IOException
	{
		// Wrap the delegate's reader so keys/records get corrected against
		// the merged header (see SortRecordReader).
		final RecordReader<LongWritable,SAMRecordWritable> reader =
			new SortRecordReader(
				delegate(ctx.getConfiguration()).createRecordReader(split, ctx));
		reader.initialize(split, ctx);
		return reader;
	}

	@Override protected boolean isSplitable(JobContext job, Path path) {
		return delegate(job.getConfiguration()).isSplitable(job, path);
	}

	@Override public List<InputSplit> getSplits(JobContext job)
		throws IOException
	{
		return delegate(job.getConfiguration()).getSplits(job);
	}
}
// Wraps the record reader produced by AnySAMInputFormat, rewriting each
// record (and its key) so that reference, program group, and read group
// identifiers refer to the merged header instead of the per-file one.
final class SortRecordReader
	extends RecordReader<LongWritable,SAMRecordWritable>
{
	private final RecordReader<LongWritable,SAMRecordWritable> baseRR;

	private SamFileHeaderMerger headerMerger;

	public SortRecordReader(RecordReader<LongWritable,SAMRecordWritable> rr) {
		baseRR = rr;
	}

	@Override public void initialize(InputSplit spl, TaskAttemptContext ctx)
		throws InterruptedException, IOException
	{
		// NOTE(review): baseRR is deliberately not initialized here — the
		// reader handed out by AnySAMInputFormat appears to arrive already
		// initialized; confirm if that delegate ever changes.
		headerMerger = Sort.getHeaderMerger(ctx.getConfiguration());
	}

	@Override public void close() throws IOException { baseRR.close(); }

	@Override public float getProgress()
		throws InterruptedException, IOException
	{
		return baseRR.getProgress();
	}

	@Override public LongWritable getCurrentKey()
		throws InterruptedException, IOException
	{
		return baseRR.getCurrentKey();
	}

	@Override public SAMRecordWritable getCurrentValue()
		throws InterruptedException, IOException
	{
		return baseRR.getCurrentValue();
	}

	/**
	 * Advances to the next record and translates its IDs into the merged
	 * header's ID spaces, updating the sort key to match.
	 */
	@Override public boolean nextKeyValue()
		throws InterruptedException, IOException
	{
		if (!baseRR.nextKeyValue())
			return false;

		final SAMRecord r = getCurrentValue().get();
		final SAMFileHeader h = r.getHeader();

		// Correct the reference indices, and thus the key, if necessary.
		if (headerMerger.hasMergedSequenceDictionary()) {
			final int ri = headerMerger.getMergedSequenceIndex(
				h, r.getReferenceIndex());
			r.setReferenceIndex(ri);
			if (r.getReadPairedFlag())
				r.setMateReferenceIndex(headerMerger.getMergedSequenceIndex(
					h, r.getMateReferenceIndex()));
			// Key layout: merged reference index in the high 32 bits,
			// 0-based alignment start in the low 32.
			getCurrentKey().set((long)ri << 32 | r.getAlignmentStart() - 1);
		}

		// Correct the program group if necessary.
		if (headerMerger.hasProgramGroupCollisions()) {
			final String pg = (String)r.getAttribute(
				ReservedTagConstants.PROGRAM_GROUP_ID);
			if (pg != null)
				r.setAttribute(
					ReservedTagConstants.PROGRAM_GROUP_ID,
					headerMerger.getProgramGroupId(h, pg));
		}

		// Correct the read group if necessary.
		// BUG FIX: this previously called getProgramGroupId() for the read
		// group, remapping RG tags through the program-group table.
		if (headerMerger.hasReadGroupCollisions()) {
			final String rg = (String)r.getAttribute(
				ReservedTagConstants.READ_GROUP_ID);
			if (rg != null)
				r.setAttribute(
					ReservedTagConstants.READ_GROUP_ID,
					headerMerger.getReadGroupId(h, rg));
		}

		getCurrentValue().set(r);
		return true;
	}
}
final class SortOutputFormat
	extends FileOutputFormat<NullWritable,SAMRecordWritable>
{
	// Base name for the per-task output parts, set by Sort.run().
	public static final String OUTPUT_NAME_PROP = "hadoopbam.sort.output.name";

	// Lazily-created delegate that writes the actual SAM/BAM bytes.
	private KeyIgnoringAnySAMOutputFormat<NullWritable> delegate;

	private void ensureDelegate(Configuration conf) {
		if (delegate == null) {
			delegate = new KeyIgnoringAnySAMOutputFormat<NullWritable>(conf);
		}
	}

	@Override public RecordWriter<NullWritable,SAMRecordWritable>
		getRecordWriter(TaskAttemptContext context)
		throws IOException
	{
		ensureDelegate(context.getConfiguration());
		// Make sure the delegate writes records against the merged header.
		if (delegate.getSAMHeader() == null) {
			delegate.setSAMHeader(Sort.getHeaderMerger(
				context.getConfiguration()).getMergedHeader());
		}
		return delegate.getRecordWriter(context, getDefaultWorkFile(context, ""));
	}

	/** Names each part as NAME-NNNNNN[.ext] in the delegate's work directory. */
	@Override public Path getDefaultWorkFile(
			TaskAttemptContext context, String ext)
		throws IOException
	{
		ensureDelegate(context.getConfiguration());
		final String base = context.getConfiguration().get(OUTPUT_NAME_PROP);
		final int task = context.getTaskAttemptID().getTaskID().getId();
		final StringBuilder name = new StringBuilder(base);
		name.append('-').append(String.format("%06d", task));
		if (!ext.isEmpty()) {
			name.append('.').append(ext);
		}
		final Path parent =
			delegate.getDefaultWorkFile(context, ext).getParent();
		return new Path(parent, name.toString());
	}

	// Allow the output directory to exist.
	@Override public void checkOutputSpecs(JobContext job)
		throws FileAlreadyExistsException, IOException
	{}
}
|
package fitnesse.components;
import java.io.File;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
/**
 * Update the current thread class path with jars found in a "plugins" directory.
*/
public class PluginsClassLoader {

  /**
   * Builds a class loader that adds every jar found in the {@code plugins}
   * directory under {@code rootPath}.  When no plugin jars exist, the system
   * class loader is returned unchanged.
   *
   * As a side effect, each plugin jar is also appended to the
   * {@code java.class.path} system property (see {@link #toUrls}).
   */
  public static ClassLoader getClassLoader(String rootPath) throws Exception {
    ClassLoader result = ClassLoader.getSystemClassLoader();
    File pluginsDirectory = new File(rootPath, "plugins");
    URL[] urls = urlsForPlugins(pluginsDirectory);
    if (urls.length > 0) {
      result = new URLClassLoader(urls, result);
    }
    return result;
  }

  /** Collects the URLs of all jar files directly inside pluginsDirectory. */
  private static URL[] urlsForPlugins(File pluginsDirectory) throws Exception {
    List<URL> urls = new ArrayList<>();
    if (pluginsDirectory.exists() && pluginsDirectory.isDirectory()) {
      // listFiles() returns null if the directory vanishes between the
      // checks above and the listing, or on an I/O error — guard against it.
      File[] entries = pluginsDirectory.listFiles();
      if (entries != null) {
        for (File plugin : entries) {
          if (plugin.getName().endsWith("jar")) {
            urls.addAll(toUrls(plugin.getCanonicalPath()));
          }
        }
      }
    }
    return urls.toArray(new URL[urls.size()]);
  }

  /**
   * Appends the given classpath items to the java.class.path system property
   * and returns them as file URLs.
   */
  private static List<URL> toUrls(String classpathItems) throws Exception {
    final String separator = File.pathSeparator;
    String currentClassPath = System.getProperty("java.class.path");
    System.setProperty("java.class.path", currentClassPath + separator + classpathItems);
    String[] items = classpathItems.split(separator);
    List<URL> urls = new ArrayList<>(items.length);
    for (String item : items) {
      urls.add(toUrl(item));
    }
    return urls;
  }

  private static URL toUrl(String fileName) throws MalformedURLException {
    return new File(fileName).toURI().toURL();
  }
}
|
package de.onyxbits.sensorreadout;
import android.app.Activity;
import android.os.Bundle;
import android.widget.*;
import android.view.*;
import android.hardware.*;
import java.util.*;
import android.graphics.*;
import android.content.*;
import android.content.pm.*;
import android.content.res.*;
import android.os.*;
import org.achartengine.*;
import org.achartengine.chart.*;
import org.achartengine.model.*;
import org.achartengine.renderer.*;
/**
* <code>Activity</code> that displays the readout of one <code>Sensor</code>.
* This <code>Activity</code> must be started with an <code>Intent</code> that passes
* in the number of the <code>Sensor</code> to display. If none is passed, the
* first available <code>Sensor</code> is used.
*/
public class ReadoutActivity extends Activity {
/**
* For passing the index number of the <code>Sensor</code> in its <code>SensorManager</code>
*/
public static final String SENSORINDEX = "de.onyxbits.sensorreadout.SensorIndex";
/**
* The <code>Sensor</code> we are dealing with
*/
private Sensor sensor;
/**
* Allows the user to pause/resume
*/
private ToggleButton update;
/**
* The displaying component
*/
protected GraphicalView chartView;
/**
* Dataset of the graphing component
*/
private XYMultipleSeriesDataset sensorData;
/**
* Renderer for actually drawing the graph
*/
protected XYMultipleSeriesRenderer renderer;
/**
* Our <code>SensorManager</code>
*/
private SensorManager sensorManager;
/**
* Data channels. Corresponds to <code>SensorEvent.values</code>. Individual channels may
* be set to null to indicate that they must not be painted.
*/
protected XYSeries channel[];
/**
* The ticker thread takes care of updating the UI
*/
private Thread ticker;
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
sensorManager = (SensorManager)getSystemService(SENSOR_SERVICE);
int idx = getIntent().getIntExtra(SENSORINDEX,0);
sensor = sensorManager.getSensorList(Sensor.TYPE_ALL).get(idx);
setTitle(sensor.getName());
// Build the content view manually. With the GraphicalView it's too much hassle to do it
// in XML
LinearLayout contentView = new LinearLayout(this);
contentView.setOrientation(LinearLayout.VERTICAL);
LinearLayout actionBar = new LinearLayout(this);
update = new ToggleButton(this);
actionBar.addView(update);
contentView.addView(actionBar);
sensorData = new XYMultipleSeriesDataset();
renderer = new XYMultipleSeriesRenderer();
renderer.setGridColor(Color.DKGRAY);
renderer.setShowGrid(true);
renderer.setXAxisMin(0.0);
renderer.setXAxisMax(100);
renderer.setXTitle("Time (ms)");
renderer.setChartTitle(" ");
renderer.setYLabelsAlign(Paint.Align.RIGHT);
chartView = ChartFactory.getLineChartView(this,sensorData,renderer);
// Note: The chart is not ready to use yet! It still lacks some information, we can only
// obtain from a SensorEvent, so its either sticking to only known sensors or defereing
// the final setup till we get our hands on such an event. Design choice: Let's try to even
// handle unknown sensors as good as we can.
//contentView.addView(chartView);
setContentView(chartView);
}
/**
* Final configuration step. Must be called between receiving the first <code>SensorEvent</code>
* and updating the graph for the first time. This is done from the ticker thread.
* @param event the event
*/
protected void configure(SensorEvent event) {
String channelNames[] = {"X-Axis", "Y-Axis", "Z-Axis"}; // Defaults...
channel = new XYSeries[event.values.length]; // ..work for most sensors
switch (event.sensor.getType()) {
case Sensor.TYPE_ACCELEROMETER: {
renderer.setYTitle("m/s²");
break;
}
case Sensor.TYPE_GRAVITY: {
channel = new XYSeries[event.values.length];
renderer.setYTitle("m/s²");
break;
}
case Sensor.TYPE_GYROSCOPE: {
channel = new XYSeries[event.values.length];
renderer.setYTitle("rad/s");
break;
}
case Sensor.TYPE_LIGHT: {
channel = new XYSeries[1];
channelNames = new String[1];
channelNames[0] = "Light";
renderer.setYTitle("lux");
break;
}
case Sensor.TYPE_LINEAR_ACCELERATION: {
renderer.setYTitle("m/s²");
break;
}
case Sensor.TYPE_MAGNETIC_FIELD: {
renderer.setYTitle("µT");
break;
}
case Sensor.TYPE_PRESSURE: {
renderer.setYTitle("hPa");
break;
}
case Sensor.TYPE_PROXIMITY: {
channel = new XYSeries[1];
channelNames = new String[1];
channelNames[0] = "Distance";
renderer.setYTitle("cm");
break;
}
case Sensor.TYPE_ROTATION_VECTOR: {
break;
}
case Sensor.TYPE_ORIENTATION: {
break;
}
default: {
// Unknown sensor -> Just show all the channels.
channel = new XYSeries[event.values.length];
for (int i=0;i<channelNames.length;i++) channelNames[i]="Channel"+i;
}
}
int[] colors = { Color.BLUE, Color.YELLOW, Color.RED, Color.GREEN };
for (int i=0;i<channel.length;i++) {
channel[i] = new XYSeries(channelNames[i]);
sensorData.addSeries(channel[i]);
XYSeriesRenderer r = new XYSeriesRenderer();
r.setColor(colors[i % colors.length] );
renderer.addSeriesRenderer(r);
}
}
@Override
protected void onResume() {
super.onResume();
// Lock the screen to its current rotation. Some sensors become impossible to read otherwise.
switch (getResources().getConfiguration().orientation) {
case Configuration.ORIENTATION_PORTRAIT: {
setRequestedOrientation(
ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
break;
}
case Configuration.ORIENTATION_LANDSCAPE: {
setRequestedOrientation(
ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
break;
}
}
ticker = new Ticker(this);
ticker.start();
sensorManager.registerListener((SensorEventListener)ticker, sensor, SensorManager.SENSOR_DELAY_UI);
}
@Override
protected void onPause() {
super.onPause();
sensorManager.unregisterListener((SensorEventListener)ticker);
try {
ticker.interrupt();
ticker.join();
}
catch (Exception e) {
e.printStackTrace();
}
}
}
|
package com.nfc;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.nfc.NdefMessage;
import android.nfc.NdefRecord;
import android.nfc.NfcAdapter;
import android.os.Build;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.Log;
import com.rhomobile.rhodes.PushService;
import com.rhomobile.rhodes.RhodesActivity;
import com.rhomobile.rhodes.RhodesService;
import com.rhomobile.rhodes.Utils;
public class Nfc extends BroadcastReceiver {

    private static final String TAG = Nfc.class.getSimpleName();

    private static boolean ourIsEnable = false;
    private static String ourCallback = null;
    private static Nfc ourInstance = null;

    private static native void callCallback(String callback_url, NfcMessagePack msgpack);

    /** Lazily creates and returns the process-wide receiver instance. */
    public static Nfc getInstance() {
        if (ourInstance == null) {
            ourInstance = new Nfc();
        }
        return ourInstance;
    }

    /** Returns 1 when an NFC adapter is present and enabled, 0 otherwise. */
    public static int isSupported() {
        NfcAdapter adapter = null;
        try {
            int sdkVersion = Integer.parseInt(Build.VERSION.SDK);
            if (sdkVersion >= Build.VERSION_CODES.GINGERBREAD_MR1) {
                adapter = NfcAdapter.getDefaultAdapter(RhodesService.getContext());
            } else if (sdkVersion >= Build.VERSION_CODES.GINGERBREAD) {
                adapter = NfcAdapter.getDefaultAdapter();
            }
        } catch (Exception ignored) {
            // NFC API unavailable on this device/OS level; report 0 below.
        }
        if (adapter == null) {
            return 0;
        }
        return adapter.isEnabled() ? 1 : 0;
    }

    /** Returns 1 when callback delivery has been enabled via setEnable(). */
    public static int isEnabled() {
        return ourIsEnable ? 1 : 0;
    }

    public static void setEnable(int enable) {
        ourIsEnable = (enable != 0);
    }

    public static void setCallback(String callback) {
        ourCallback = callback;
    }

    @Override
    public void onReceive(Context context, Intent intent) {
        if (!NfcAdapter.ACTION_TAG_DISCOVERED.equals(intent.getAction())) {
            return;
        }
        Parcelable[] rawMessages = intent.getParcelableArrayExtra(NfcAdapter.EXTRA_NDEF_MESSAGES);
        NdefMessage[] messages;
        if (rawMessages == null) {
            // Unknown tag type: deliver a single empty record of unknown TNF.
            byte[] empty = new byte[] {};
            NdefRecord record = new NdefRecord(NdefRecord.TNF_UNKNOWN, empty, empty, empty);
            messages = new NdefMessage[] { new NdefMessage(new NdefRecord[] { record }) };
        } else {
            messages = new NdefMessage[rawMessages.length];
            for (int i = 0; i < rawMessages.length; i++) {
                messages[i] = (NdefMessage) rawMessages[i];
            }
        }
        onReceiveMessages(messages);
    }

    private static boolean isRhodesApplicationRun() {
        return RhodesService.getInstance() != null;
    }

    /** Packs the received messages and forwards them to the native callback. */
    void onReceiveMessages(NdefMessage[] msgs) {
        if (msgs == null || msgs.length == 0) {
            return;
        }
        Utils.platformLog(TAG, "NFC TAG Received ! Service started = "+String.valueOf(isRhodesApplicationRun()));
        NfcMessagePack pack = new NfcMessagePack(msgs);
        // Only deliver when the app is running, delivery is enabled, and a
        // non-empty callback URL has been registered.
        if (isRhodesApplicationRun() && ourIsEnable
                && ourCallback != null && ourCallback.length() > 0) {
            callCallback(ourCallback, pack);
        }
    }
}
|
package io.logz.sender;
import io.logz.sender.exceptions.LogzioParameterErrorException;
import io.logz.test.MockLogzioBulkListener;
import io.logz.test.TestEnvironment;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import static io.logz.sender.LogzioTestSenderUtil.LOGLEVEL;
import static io.logz.sender.LogzioTestSenderUtil.createJsonMessage;
public abstract class LogzioSenderTest {
protected MockLogzioBulkListener mockListener;
private final static Logger logger = LoggerFactory.getLogger(LogzioSenderTest.class);
private static final int INITIAL_WAIT_BEFORE_RETRY_MS = 2000;
private static final int MAX_RETRIES_ATTEMPTS = 3;
private ScheduledExecutorService tasks;
@Before
public void startListenerAndExecutors() throws Exception {
mockListener = new MockLogzioBulkListener();
mockListener.start();
tasks = Executors.newScheduledThreadPool(3);
}
@After
public void stopListenerAndExecutors() {
if (mockListener != null)
mockListener.stop();
if (tasks != null){
tasks.shutdownNow();
}
}
protected void sleepSeconds(int seconds) {
logger.info("Sleeping {} [sec]...", seconds);
try {
Thread.sleep(seconds * 1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
protected LogzioSender.Builder getLogzioSenderBuilder(String token, String type, Integer drainTimeout,
Integer socketTimeout, Integer serverTimeout,
ScheduledExecutorService tasks,
boolean compressRequests)
throws LogzioParameterErrorException{
LogzioTestStatusReporter logy = new LogzioTestStatusReporter(logger);
HttpsRequestConfiguration httpsRequestConfiguration = HttpsRequestConfiguration
.builder()
.setCompressRequests(compressRequests)
.setConnectTimeout(serverTimeout)
.setSocketTimeout(socketTimeout)
.setLogzioToken(token)
.setLogzioType(type)
.setLogzioListenerUrl("http://" + mockListener.getHost() + ":" + mockListener.getPort())
.build();
return LogzioSender
.builder()
.setDebug(false)
.setTasksExecutor(tasks)
.setDrainTimeoutSec(drainTimeout)
.setReporter(logy)
.setHttpsRequestConfiguration(httpsRequestConfiguration);
}
protected LogzioSender createLogzioSender(LogzioSender.Builder logzioSenderBuilder) throws LogzioParameterErrorException {
LogzioSender logzioSender = logzioSenderBuilder.build();
logzioSender.start();
return logzioSender;
}
protected abstract void setZeroThresholdQueue(LogzioSender.Builder logzioSenderBuilder);
protected String random(int numberOfChars) {
return UUID.randomUUID().toString().substring(0, numberOfChars-1);
}
@Test
public void simpleAppending() throws Exception {
String token = "aBcDeFgHiJkLmNoPqRsT";
String type = random(8);
String loggerName = "simpleAppending";
int drainTimeout = 2;
String message1 = "Testing.." + random(5);
String message2 = "Warning test.." + random(5);
LogzioSender.Builder testSenderBuilder = getLogzioSenderBuilder(token, type, drainTimeout,
10 * 1000, 10 * 1000, tasks,false);
LogzioSender testSender = createLogzioSender(testSenderBuilder);
testSender.send( createJsonMessage(loggerName, message1));
testSender.send( createJsonMessage(loggerName, message2));
sleepSeconds(drainTimeout *3);
mockListener.assertNumberOfReceivedMsgs(2);
mockListener.assertLogReceivedIs(message1, token, type, loggerName, LOGLEVEL);
mockListener.assertLogReceivedIs(message2, token, type, loggerName, LOGLEVEL);
}
@Test
public void simpleGzipAppending() throws Exception {
String token = "gzipToken";
String type = random(8);
String loggerName = "simpleGzipAppending";
int drainTimeout = 2;
String message1 = "Testing.." + random(5);
String message2 = "Warning test.." + random(5);
LogzioSender.Builder testSenderBuilder = getLogzioSenderBuilder(token, type, drainTimeout, 10 * 1000,
10 * 1000, tasks, true);
LogzioSender testSender = createLogzioSender(testSenderBuilder);
testSender.send( createJsonMessage(loggerName, message1));
testSender.send( createJsonMessage(loggerName, message2));
sleepSeconds(drainTimeout *3);
mockListener.assertNumberOfReceivedMsgs(2);
mockListener.assertLogReceivedIs(message1, token, type, loggerName, LOGLEVEL);
mockListener.assertLogReceivedIs(message2, token, type, loggerName, LOGLEVEL);
}
@Test
public void multipleQueueDrains() throws Exception {
String token = "tokenWohooToken";
String type = random(8);
String loggerName = "multipleQueueDrains";
int drainTimeout = 2;
String message1 = "Testing first drain - " + random(5);
String message2 = "And the second drain" + random(5);
LogzioSender.Builder testSenderBuilder = getLogzioSenderBuilder(token, type, drainTimeout, 10 * 1000,
10 * 1000, tasks, false);
LogzioSender testSender = createLogzioSender(testSenderBuilder);
testSender.send(createJsonMessage( loggerName, message1));
sleepSeconds(2 * drainTimeout);
mockListener.assertNumberOfReceivedMsgs(1);
mockListener.assertLogReceivedIs(message1, token, type, loggerName, LOGLEVEL);
testSender.send(createJsonMessage(loggerName, message2));
sleepSeconds(2 * drainTimeout);
mockListener.assertNumberOfReceivedMsgs(2);
mockListener.assertLogReceivedIs(message2, token, type, loggerName, LOGLEVEL);
}
@Test
public void longDrainTimeout() throws Exception {
String token = "soTestingIsSuperImportant";
String type = random(8);
String loggerName = "longDrainTimeout";
int drainTimeout = 10;
String message1 = "Sending one log - " + random(5);
String message2 = "And one more important one - " + random(5);
LogzioSender.Builder testSenderBuilder = getLogzioSenderBuilder(token, type, drainTimeout, 10 * 1000,
10 * 1000, tasks, false);
LogzioSender testSender = createLogzioSender(testSenderBuilder);
testSender.send(createJsonMessage(loggerName, message1));
testSender.send(createJsonMessage(loggerName, message2));
mockListener.assertNumberOfReceivedMsgs(0);
sleepSeconds(drainTimeout + 1);
mockListener.assertNumberOfReceivedMsgs(2);
mockListener.assertLogReceivedIs(message1, token, type, loggerName, LOGLEVEL);
mockListener.assertLogReceivedIs(message2, token, type, loggerName, LOGLEVEL);
}
@Test
public void fsPercentDrop() throws Exception {
String token = "droppingLogsDueToFSOveruse";
String type = random(8);
String loggerName = "fsPercentDrop";
int drainTimeoutSec = 1;
File tempDirectoryThatWillBeInTheSameFsAsTheQueue = TestEnvironment.createTempDirectory();
tempDirectoryThatWillBeInTheSameFsAsTheQueue.deleteOnExit();
String message1 = "First log that will be dropped - " + random(5);
String message2 = "And a second drop - " + random(5);
LogzioSender.Builder testSenderBuilder = getLogzioSenderBuilder(token, type, drainTimeoutSec, 10 * 1000,
10 * 1000, tasks, false);
setZeroThresholdQueue(testSenderBuilder);
LogzioSender testSender = createLogzioSender(testSenderBuilder);
// verify the thread that checks for space made at least one check
sleepSeconds(2 * drainTimeoutSec);
testSender.send(createJsonMessage(loggerName, message1));
testSender.send(createJsonMessage(loggerName, message2));
sleepSeconds(2 * drainTimeoutSec);
mockListener.assertNumberOfReceivedMsgs(0);
tempDirectoryThatWillBeInTheSameFsAsTheQueue.delete();
}
@Test
public void serverCrash() throws Exception {
String token = "nowWeWillCrashTheServerAndRecover";
String type = random(8);
String loggerName = "serverCrash";
int drainTimeout = 1;
String message1 = "Log before drop - " + random(5);
String message2 = "Log during drop - " + random(5);
String message3 = "Log after drop - " + random(5);
LogzioSender.Builder testSenderBuilder = getLogzioSenderBuilder(token, type, drainTimeout, 10 * 1000,
10 * 1000, tasks, false);
LogzioSender testSender = createLogzioSender(testSenderBuilder);
testSender.send(createJsonMessage(loggerName, message1));
sleepSeconds(2 * drainTimeout);
mockListener.assertNumberOfReceivedMsgs(1);
mockListener.assertLogReceivedIs(message1, token, type, loggerName, LOGLEVEL);
mockListener.stop();
testSender.send(createJsonMessage(loggerName, message2));
sleepSeconds(2 * drainTimeout);
mockListener.assertNumberOfReceivedMsgs(1); // haven't changed - still 1
mockListener.start();
testSender.send(createJsonMessage(loggerName, message3));
sleepSeconds(2 * drainTimeout);
mockListener.assertNumberOfReceivedMsgs(3);
mockListener.assertLogReceivedIs(message2, token, type, loggerName, LOGLEVEL);
mockListener.assertLogReceivedIs(message3, token, type, loggerName, LOGLEVEL);
}
@Test
public void getTimeoutFromServer() throws Exception {
String token = "gettingTimeoutFromServer";
String type = random(8);
String loggerName = "getTimeoutFromServer";
int drainTimeout = 1;
int serverTimeout = 2000;
String message1 = "Log that will be sent - " + random(5);
String message2 = "Log that would timeout and then being re-sent - " + random(5);
int socketTimeout = serverTimeout / 2;
LogzioSender.Builder testSenderBuilder = getLogzioSenderBuilder(token, type, drainTimeout, socketTimeout,
serverTimeout, tasks, false);
LogzioSender testSender = createLogzioSender(testSenderBuilder);
testSender.send(createJsonMessage(loggerName, message1));
sleepSeconds(2 * drainTimeout);
mockListener.assertNumberOfReceivedMsgs(1);
mockListener.assertLogReceivedIs(message1, token, type, loggerName, LOGLEVEL);
mockListener.setTimeoutMillis(serverTimeout);
mockListener.setServerTimeoutMode(true);
testSender.send(createJsonMessage(loggerName, message2));
sleepSeconds((socketTimeout / 1000) * MAX_RETRIES_ATTEMPTS + retryTotalDelay());
mockListener.assertNumberOfReceivedMsgs(1); // Stays the same
mockListener.setServerTimeoutMode(false);
sleepSeconds(2 * drainTimeout);
mockListener.assertNumberOfReceivedMsgs(2);
mockListener.assertLogReceivedIs(message2, token, type, loggerName, LOGLEVEL);
}
private int retryTotalDelay() {
int sleepBetweenRetry = INITIAL_WAIT_BEFORE_RETRY_MS / 1000;
int totalSleepTime = 0;
for (int i = 1; i < MAX_RETRIES_ATTEMPTS; i++) {
totalSleepTime += sleepBetweenRetry;
sleepBetweenRetry *= 2;
}
return totalSleepTime;
}
@Test
public void getExceptionFromServer() throws Exception {
String token = "gettingExceptionFromServer";
String type = random(8);
String loggerName = "getExceptionFromServer";
int drainTimeout = 1;
String message1 = "Log that will be sent - " + random(5);
String message2 = "Log that would get exception and be sent again - " + random(5);
LogzioSender.Builder testSenderBuilder = getLogzioSenderBuilder(token, type, drainTimeout, 10 * 1000,
10 * 1000, tasks, false);
LogzioSender testSender = createLogzioSender(testSenderBuilder);
testSender.send(createJsonMessage(loggerName, message1));
sleepSeconds(2 * drainTimeout);
mockListener.assertNumberOfReceivedMsgs(1);
mockListener.assertLogReceivedIs(message1, token, type, loggerName, LOGLEVEL);
mockListener.setFailWithServerError(true);
testSender.send(createJsonMessage(loggerName, message2));
sleepSeconds(2 * drainTimeout);
mockListener.assertNumberOfReceivedMsgs(1); // Haven't changed
mockListener.setFailWithServerError(false);
Thread.sleep(drainTimeout * 1000 * 2);
mockListener.assertNumberOfReceivedMsgs(2);
mockListener.assertLogReceivedIs(message2, token, type, loggerName, LOGLEVEL);
}
}
|
package dr.evomodel.antigenic;
import dr.evolution.util.*;
import dr.inference.model.*;
import dr.math.MathUtils;
import dr.math.distributions.NormalDistribution;
import dr.util.*;
import dr.xml.*;
import java.io.*;
import java.util.*;
import java.util.logging.Logger;
/**
* @author Andrew Rambaut
* @author Trevor Bedford
* @author Marc Suchard
* @version $Id$
*/
public class AntigenicLikelihood extends AbstractModelLikelihood implements Citable {
public final static String ANTIGENIC_LIKELIHOOD = "antigenicLikelihood";
// column indices in table
private static final int COLUMN_LABEL = 0;
private static final int SERUM_STRAIN = 2;
private static final int ROW_LABEL = 1;
private static final int VIRUS_STRAIN = 3;
private static final int SERUM_DATE = 4;
private static final int VIRUS_DATE = 5;
private static final int RAW_TITRE = 6;
private static final int MIN_TITRE = 7;
private static final int MAX_TITRE = 8;
public enum MeasurementType {
INTERVAL,
POINT,
UPPER_BOUND,
LOWER_BOUND,
MISSING
}
/**
 * Builds the antigenic (MDS) likelihood from a table of HI assay measurements.
 * <p>
 * Each table row holds: column (serum) label, row (virus) label, serum strain,
 * virus strain, serum date, virus date, raw titre, min titre and max titre
 * (see the *_LABEL / *_STRAIN / *_DATE / *_TITRE index constants). Measurements
 * are classified as POINT, INTERVAL or censored (UPPER/LOWER_BOUND) depending
 * on which titre bounds are present.
 *
 * @param mdsDimension               number of antigenic map dimensions
 * @param mdsPrecisionParameter      precision of the measurement error model
 * @param strainTaxa                 optional predefined strain taxa; if null the
 *                                   strains are collected from the data table
 * @param locationsParameter         matrix of per-strain map locations (sized here)
 * @param datesParameter             optional parameter filled with strain dates
 * @param columnParameter            optional per-serum (column) effects; created if null
 * @param rowParameter               optional per-virus (row) effects; created if null
 * @param dataTable                  the parsed assay table
 * @param virusLocationStatisticList currently unused hook for derived statistics
 */
public AntigenicLikelihood(
        int mdsDimension,
        Parameter mdsPrecisionParameter,
        TaxonList strainTaxa,
        MatrixParameter locationsParameter,
        Parameter datesParameter,
        Parameter columnParameter,
        Parameter rowParameter,
        DataTable<String[]> dataTable,
        List<String> virusLocationStatisticList) {

    super(ANTIGENIC_LIKELIHOOD);

    List<String> strainNames = new ArrayList<String>();
    Map<String, Double> strainDateMap = new HashMap<String, Double>();

    for (int i = 0; i < dataTable.getRowCount(); i++) {
        String[] values = dataTable.getRow(i);

        // Register the serum (column) label if not seen before.
        int column = columnLabels.indexOf(values[COLUMN_LABEL]);
        if (column == -1) {
            columnLabels.add(values[COLUMN_LABEL]);
            column = columnLabels.size() - 1;
        }

        int columnStrain = -1;
        if (strainTaxa != null) {
            columnStrain = strainTaxa.getTaxonIndex(values[SERUM_STRAIN]);
        } else {
            columnStrain = strainNames.indexOf(values[SERUM_STRAIN]);
            if (columnStrain == -1) {
                strainNames.add(values[SERUM_STRAIN]);
                // BUG FIX: this branch registers a *serum* strain, so its date
                // must come from SERUM_DATE and be keyed by the serum strain
                // name (it was previously copy-pasted from the virus branch
                // below and used VIRUS_DATE / VIRUS_STRAIN).
                Double date = Double.parseDouble(values[SERUM_DATE]);
                strainDateMap.put(values[SERUM_STRAIN], date);
                columnStrain = strainNames.size() - 1;
            }
        }
        if (columnStrain == -1) {
            throw new IllegalArgumentException("Error reading data table: Unrecognized serum strain name, " + values[SERUM_STRAIN] + ", in row " + (i+1));
        }

        // Register the virus (row) label if not seen before.
        int row = rowLabels.indexOf(values[ROW_LABEL]);
        if (row == -1) {
            rowLabels.add(values[ROW_LABEL]);
            row = rowLabels.size() - 1;
        }

        int rowStrain = -1;
        if (strainTaxa != null) {
            rowStrain = strainTaxa.getTaxonIndex(values[VIRUS_STRAIN]);
        } else {
            rowStrain = strainNames.indexOf(values[VIRUS_STRAIN]);
            if (rowStrain == -1) {
                strainNames.add(values[VIRUS_STRAIN]);
                Double date = Double.parseDouble(values[VIRUS_DATE]);
                strainDateMap.put(values[VIRUS_STRAIN], date);
                rowStrain = strainNames.size() - 1;
            }
        }
        if (rowStrain == -1) {
            throw new IllegalArgumentException("Error reading data table: Unrecognized virus strain name, " + values[VIRUS_STRAIN] + ", in row " + (i+1));
        }

        // Unparseable or empty bounds are represented by NaN.
        double minTitre = Double.NaN;
        if (values[MIN_TITRE].length() > 0) {
            try {
                minTitre = Double.parseDouble(values[MIN_TITRE]);
            } catch (NumberFormatException nfe) {
                // deliberately ignored: a non-numeric bound is treated as missing
            }
        }
        double maxTitre = Double.NaN;
        if (values[MAX_TITRE].length() > 0) {
            try {
                maxTitre = Double.parseDouble(values[MAX_TITRE]);
            } catch (NumberFormatException nfe) {
                // deliberately ignored: a non-numeric bound is treated as missing
            }
        }

        // Classify the measurement: equal bounds -> POINT; a missing/zero
        // lower bound -> UPPER_BOUND (censored below); a missing upper
        // bound -> LOWER_BOUND (censored above). Note NaN == NaN is false,
        // so fully missing rows fall through to the both-bounds error below.
        MeasurementType type = MeasurementType.INTERVAL;
        if (minTitre == maxTitre) {
            type = MeasurementType.POINT;
        }
        if (Double.isNaN(minTitre) || minTitre == 0.0) {
            if (Double.isNaN(maxTitre)) {
                throw new IllegalArgumentException("Error in measurement: both min and max titre are at bounds in row " + (i+1));
            }
            type = MeasurementType.UPPER_BOUND;
        } else if (Double.isNaN(maxTitre)) {
            type = MeasurementType.LOWER_BOUND;
        }

        Measurement measurement = new Measurement(column, columnStrain, row, rowStrain, type, minTitre, maxTitre);
        measurements.add(measurement);
    }

    // Per-column and per-row maximum titres, used to initialize the effect
    // parameters below.
    double[] maxColumnTitre = new double[columnLabels.size()];
    double[] maxRowTitre = new double[rowLabels.size()];
    for (Measurement measurement : measurements) {
        double titre = measurement.maxTitre;
        if (Double.isNaN(titre)) {
            titre = measurement.minTitre;
        }
        if (titre > maxColumnTitre[measurement.column]) {
            maxColumnTitre[measurement.column] = titre;
        }
        if (titre > maxRowTitre[measurement.row]) {
            maxRowTitre[measurement.row] = titre;
        }
    }

    if (strainTaxa != null) {
        this.strains = strainTaxa;
        // fill in the strain name array for local use
        for (int i = 0; i < strains.getTaxonCount(); i++) {
            strainNames.add(strains.getTaxon(i).getId());
        }
    } else {
        Taxa taxa = new Taxa();
        for (String strain : strainNames) {
            taxa.addTaxon(new Taxon(strain));
        }
        this.strains = taxa;
    }

    this.mdsDimension = mdsDimension;
    this.mdsPrecisionParameter = mdsPrecisionParameter;
    addVariable(mdsPrecisionParameter);

    this.locationsParameter = locationsParameter;
    setupLocationsParameter(this.locationsParameter, strainNames);
    addVariable(this.locationsParameter);

    if (datesParameter != null) {
        // this parameter is not used in this class but is setup to be used in other classes
        datesParameter.setDimension(strainNames.size());
        ((Parameter.Abstract)datesParameter).setDimensionNames((String[])strainNames.toArray());
        for (int i = 0; i < strainNames.size(); i++) {
            double date = strainDateMap.get(strainNames.get(i));
            datesParameter.setParameterValue(i, date);
        }
    }

    if (columnParameter == null) {
        this.columnEffectsParameter = new Parameter.Default("columnEffects");
    } else {
        this.columnEffectsParameter = columnParameter;
    }
    this.columnEffectsParameter.setDimension(columnLabels.size());
    addVariable(this.columnEffectsParameter);
    String[] labelArray = new String[columnLabels.size()];
    columnLabels.toArray(labelArray);
    ((Parameter.Abstract)this.columnEffectsParameter).setDimensionNames(labelArray);
    // Initialize each column effect at the column's maximum observed titre.
    for (int i = 0; i < maxColumnTitre.length; i++) {
        this.columnEffectsParameter.setParameterValue(i, maxColumnTitre[i]);
    }

    if (rowParameter == null) {
        this.rowEffectsParameter = new Parameter.Default("rowEffects");
    } else {
        this.rowEffectsParameter = rowParameter;
    }
    this.rowEffectsParameter.setDimension(rowLabels.size());
    addVariable(this.rowEffectsParameter);
    labelArray = new String[rowLabels.size()];
    rowLabels.toArray(labelArray);
    ((Parameter.Abstract)this.rowEffectsParameter).setDimensionNames(labelArray);
    // Initialize each row effect at the row's maximum observed titre.
    for (int i = 0; i < maxRowTitre.length; i++) {
        this.rowEffectsParameter.setParameterValue(i, maxRowTitre[i]);
    }

    StringBuilder sb = new StringBuilder();
    sb.append("\tAntigenicLikelihood:\n");
    sb.append("\t\t" + this.strains.getTaxonCount() + " strains\n");
    sb.append("\t\t" + columnLabels.size() + " unique columns\n");
    sb.append("\t\t" + rowLabels.size() + " unique rows\n");
    sb.append("\t\t" + measurements.size() + " assay measurements\n");
    Logger.getLogger("dr.evomodel").info(sb.toString());

    // Some initial locations: spread strains along the first dimension,
    // jitter the remaining dimensions with Gaussian noise.
    for (int i = 0; i < locationsParameter.getParameterCount(); i++) {
        for (int j = 0; j < mdsDimension; j++) {
            double r = 0.0;
            if (j == 0) {
                r = (double) i * 0.05;
            }
            else {
                r = MathUtils.nextGaussian();
            }
            locationsParameter.getParameter(i).setParameterValueQuietly(j, r);
        }
    }

    locationChanged = new boolean[this.locationsParameter.getRowDimension()];
    logLikelihoods = new double[measurements.size()];
    storedLogLikelihoods = new double[measurements.size()];
    likelihoodKnown = false;
}
/**
 * Sizes the locations matrix to (strain count x mdsDimension) and labels each
 * per-strain row parameter with its strain name.
 */
protected void setupLocationsParameter(MatrixParameter locationsParameter, List<String> strains) {
    final int strainCount = strains.size();
    locationsParameter.setColumnDimension(mdsDimension);
    locationsParameter.setRowDimension(strainCount);
    int index = 0;
    for (String strainName : strains) {
        locationsParameter.getParameter(index).setId(strainName);
        index++;
    }
}
@Override
protected void handleModelChangedEvent(Model model, Object object, int index) {
    // No sub-models are registered by this class, so there is nothing to do.
}
@Override
protected void handleVariableChangedEvent(Variable variable, int index, Variable.ChangeType type) {
    if (variable == locationsParameter) {
        // index is a flat offset into the (strain x dimension) matrix;
        // integer division recovers which strain's location moved so only
        // that strain's measurements are recomputed on the next evaluation.
        locationChanged[index / mdsDimension] = true;
    } else if (variable == mdsPrecisionParameter) {
    } else if (variable == columnEffectsParameter) {
    } else if (variable == rowEffectsParameter) {
    } else {
        // could be a derived class's parameter
    }
    // Any variable change invalidates the cached total log likelihood.
    likelihoodKnown = false;
}
@Override
protected void storeState() {
    // Snapshot the per-measurement likelihood cache so restoreState() can
    // roll back a rejected MCMC move without recomputation.
    System.arraycopy(logLikelihoods, 0, storedLogLikelihoods, 0, logLikelihoods.length);
}
@Override
protected void restoreState() {
    // Swap references rather than copy: the stored array becomes current.
    double[] tmp = logLikelihoods;
    logLikelihoods = storedLogLikelihoods;
    storedLogLikelihoods = tmp;
    // Force the total to be re-summed from the restored per-measurement values.
    likelihoodKnown = false;
}
@Override
protected void acceptState() {
    // Nothing to do: the current arrays already reflect the accepted state.
}
@Override
public Model getModel() {
    // This likelihood is its own model (it extends AbstractModelLikelihood).
    return this;
}
@Override
public double getLogLikelihood() {
    // Lazily recompute; computeLogLikelihood() sets likelihoodKnown = true.
    if (!likelihoodKnown) {
        logLikelihood = computeLogLikelihood();
    }
    return logLikelihood;
}
// This function can be overwritten to implement other sampling densities, i.e. discrete ranks
private double computeLogLikelihood() {
    double precision = mdsPrecisionParameter.getParameterValue(0);
    double sd = 1.0 / Math.sqrt(precision);
    logLikelihood = 0.0;
    int i = 0;
    for (Measurement measurement : measurements) {
        // Only measurements whose virus or serum location moved are
        // recomputed; the rest reuse the cached logLikelihoods[i].
        // NOTE(review): the constructor leaves all locationChanged flags
        // false while likelihoodKnown is false, and makeDirty() does not set
        // them either, so a full recomputation is never forced here —
        // confirm callers always move locations before the first evaluation.
        if (locationChanged[measurement.rowStrain] || locationChanged[measurement.columnStrain]) {
            double distance = computeDistance(measurement.rowStrain, measurement.columnStrain);
            // Normalization for truncating the measurement density at the map distance.
            double logNormalization = calculateTruncationNormalization(distance, sd);
            switch (measurement.type) {
                case INTERVAL: {
                    double minTitre = transformTitre(measurement.minTitre, measurement.column, measurement.row, distance, sd);
                    double maxTitre = transformTitre(measurement.maxTitre, measurement.column, measurement.row, distance, sd);
                    logLikelihoods[i] = computeMeasurementIntervalLikelihood(minTitre, maxTitre) - logNormalization;
                } break;
                case POINT: {
                    // POINT measurements have min == max, so minTitre is the value.
                    double titre = transformTitre(measurement.minTitre, measurement.column, measurement.row, distance, sd);
                    logLikelihoods[i] = computeMeasurementLikelihood(titre) - logNormalization;
                } break;
                case LOWER_BOUND: {
                    double minTitre = transformTitre(measurement.minTitre, measurement.column, measurement.row, distance, sd);
                    logLikelihoods[i] = computeMeasurementLowerBoundLikelihood(minTitre) - logNormalization;
                } break;
                case UPPER_BOUND: {
                    double maxTitre = transformTitre(measurement.maxTitre, measurement.column, measurement.row, distance, sd);
                    logLikelihoods[i] = computeMeasurementUpperBoundLikelihood(maxTitre) - logNormalization;
                } break;
                case MISSING:
                    break;
            }
        }
        logLikelihood += logLikelihoods[i];
        i++;
    }
    likelihoodKnown = true;
    clearLocationChangedFlags();
    return logLikelihood;
}
/** Resets all per-strain dirty flags once the cached likelihoods are fresh. */
private void clearLocationChangedFlags() {
    Arrays.fill(locationChanged, false);
}
/**
 * Euclidean distance between two strains in the antigenic map
 * (zero when both indices refer to the same strain).
 */
protected double computeDistance(int rowStrain, int columnStrain) {
    if (rowStrain == columnStrain) {
        return 0.0;
    }
    final Parameter a = locationsParameter.getParameter(rowStrain);
    final Parameter b = locationsParameter.getParameter(columnStrain);
    double sumOfSquares = 0.0;
    for (int dim = 0; dim < mdsDimension; dim++) {
        final double delta = a.getParameterValue(dim) - b.getParameterValue(dim);
        sumOfSquares += delta * delta;
    }
    return Math.sqrt(sumOfSquares);
}
/**
 * Transforms a (log2) titre into a standardized residual: the expected titre
 * is the average of the row and column effects minus nothing, and the observed
 * titre is subtracted from it, then centred on {@code mean} and scaled by
 * {@code sd} so it can be evaluated against a unit normal.
 *
 * @param titre  observed log2 titre
 * @param column serum (column) index into the column effects
 * @param row    virus (row) index into the row effects
 * @param mean   expected map distance
 * @param sd     measurement standard deviation
 * @return the standardized residual
 */
private double transformTitre(double titre, int column, int row, double mean, double sd) {
    final double rowEffect = rowEffectsParameter.getParameterValue(row);
    final double columnEffect = columnEffectsParameter.getParameterValue(column);
    final double expectedMinusObserved = ((rowEffect + columnEffect) * 0.5) - titre;
    return (expectedMinusObserved - mean) / sd;
}
/**
 * Log probability that a unit normal falls between the two transformed titres.
 * transformTitre() negates the titre, so the transformed min lies to the right
 * of the transformed max and cdf1 - cdf2 is non-negative.
 */
private double computeMeasurementIntervalLikelihood(double minTitre, double maxTitre) {
    double cdf1 = NormalDistribution.standardCDF(minTitre, false);
    double cdf2 = NormalDistribution.standardCDF(maxTitre, false);
    double lnL = Math.log(cdf1 - cdf2);
    // NOTE(review): when the CDFs coincide numerically the code falls back to
    // log(cdf1) — this looks like an underflow guard for a collapsed interval;
    // confirm the intended semantics.
    if (cdf1 == cdf2) {
        lnL = Math.log(cdf1);
    }
    if (Double.isNaN(lnL) || Double.isInfinite(lnL)) {
        throw new RuntimeException("infinite");
    }
    return lnL;
}
/** Log density of a unit normal at the transformed point titre. */
private double computeMeasurementLikelihood(double titre) {
    double lnL = Math.log(NormalDistribution.pdf(titre, 0.0, 1.0));
    if (Double.isNaN(lnL) || Double.isInfinite(lnL)) {
        throw new RuntimeException("infinite");
    }
    return lnL;
}
/** Log tail probability for a right-censored (lower-bound-only) measurement. */
private double computeMeasurementLowerBoundLikelihood(double transformedMinTitre) {
    // a lower bound in non-transformed titre so the bottom tail of the distribution
    // (transformTitre negates the titre, hence the lower/bottom tail here).
    double cdf = NormalDistribution.standardTail(transformedMinTitre, true);
    double lnL = Math.log(cdf);
    if (Double.isNaN(lnL) || Double.isInfinite(lnL)) {
        throw new RuntimeException("infinite");
    }
    return lnL;
}
/** Log tail probability for a left-censored (upper-bound-only) measurement. */
private double computeMeasurementUpperBoundLikelihood(double transformedMaxTitre) {
    // a upper bound in non-transformed titre so the upper tail of the distribution
    // using special tail function of NormalDistribution (see main() in NormalDistribution for test)
    double tail = NormalDistribution.standardTail(transformedMaxTitre, false);
    double lnL = Math.log(tail);
    if (Double.isNaN(lnL) || Double.isInfinite(lnL)) {
        throw new RuntimeException("infinite");
    }
    return lnL;
}
/**
 * Log normalizing constant for truncating the measurement density:
 * log CDF of N(0, sd) evaluated at the map distance.
 */
private double calculateTruncationNormalization(double distance, double sd) {
    return NormalDistribution.cdf(distance, 0.0, sd, true);
}
/**
 * Invalidates all cached likelihood state so the next evaluation recomputes
 * everything from scratch.
 */
@Override
public void makeDirty() {
    likelihoodKnown = false;
    // BUG FIX: computeLogLikelihood() only refreshes the per-measurement
    // cache for strains whose locationChanged flag is set; clearing
    // likelihoodKnown alone would re-sum stale cached entries. Mark every
    // location dirty so the whole cache is rebuilt.
    Arrays.fill(locationChanged, true);
}
/**
 * A single assay measurement linking a serum (column) and a virus (row).
 * Titres are stored on the log2 scale, matching the two-fold HI dilution series.
 */
private class Measurement {
    private Measurement(final int column, final int columnStrain, final int row, final int rowStrain, final MeasurementType type, final double minTitre, final double maxTitre) {
        this.column = column;
        this.columnStrain = columnStrain;
        this.row = row;
        this.rowStrain = rowStrain;
        this.type = type;
        // Convert raw titres to log2; NaN inputs (missing bounds) stay NaN.
        this.minTitre = Math.log(minTitre) / Math.log(2);
        this.maxTitre = Math.log(maxTitre) / Math.log(2);
    }

    final int column;       // serum (column label) index
    final int row;          // virus (row label) index
    final int columnStrain; // serum strain index into the locations matrix
    final int rowStrain;    // virus strain index into the locations matrix
    final MeasurementType type;
    final double minTitre;  // log2 lower bound (NaN if absent)
    final double maxTitre;  // log2 upper bound (NaN if absent)
};
private final List<Measurement> measurements = new ArrayList<Measurement>();
private final List<String> columnLabels = new ArrayList<String>();
private final List<String> rowLabels = new ArrayList<String>();
private final int mdsDimension;
private final Parameter mdsPrecisionParameter;
private final MatrixParameter locationsParameter;
private final TaxonList strains;
// private final CompoundParameter tipTraitParameter;
private final Parameter columnEffectsParameter;
private final Parameter rowEffectsParameter;
private double logLikelihood = 0.0;
private boolean likelihoodKnown = false;
private final boolean[] locationChanged;
private double[] logLikelihoods;
private double[] storedLogLikelihoods;
// XMLObjectParser
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
public final static String FILE_NAME = "fileName";
public final static String TIP_TRAIT = "tipTrait";
public final static String LOCATIONS = "locations";
public final static String DATES = "dates";
public static final String MDS_DIMENSION = "mdsDimension";
public static final String MDS_PRECISION = "mdsPrecision";
public static final String COLUMN_EFFECTS = "columnEffects";
public static final String ROW_EFFECTS = "rowEffects";
public static final String STRAINS = "strains";
/** Returns the XML element name this parser handles. */
public String getParserName() {
    return ANTIGENIC_LIKELIHOOD;
}
/**
 * Parses the antigenicLikelihood XML element: reads the assay table from the
 * fileName attribute and wires up the MDS dimension, precision, optional
 * strains/dates, locations and the column/row effect parameters.
 *
 * @throws XMLParseException if the assay file cannot be read
 */
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    String fileName = xo.getStringAttribute(FILE_NAME);
    DataTable<String[]> assayTable;
    try {
        assayTable = DataTable.Text.parse(new FileReader(fileName), true, false);
    } catch (IOException e) {
        throw new XMLParseException("Unable to read assay data from file: " + e.getMessage());
    }
    int mdsDimension = xo.getIntegerAttribute(MDS_DIMENSION);

    TaxonList strains = null;
    if (xo.hasChildNamed(STRAINS)) {
        strains = (TaxonList) xo.getElementFirstChild(STRAINS);
    }

    MatrixParameter locationsParameter = (MatrixParameter) xo.getElementFirstChild(LOCATIONS);

    Parameter datesParameter = null;
    // BUG FIX: this previously tested hasChildNamed(STRAINS), so a dates
    // element was only read when a strains element was also present (and a
    // missing dates element with strains present would throw). Test for the
    // DATES child itself.
    if (xo.hasChildNamed(DATES)) {
        datesParameter = (Parameter) xo.getElementFirstChild(DATES);
    }

    Parameter mdsPrecision = (Parameter) xo.getElementFirstChild(MDS_PRECISION);
    Parameter columnEffectsParameter = (Parameter) xo.getElementFirstChild(COLUMN_EFFECTS);
    Parameter rowEffectsParameter = (Parameter) xo.getElementFirstChild(ROW_EFFECTS);

    AntigenicLikelihood AGL = new AntigenicLikelihood(
            mdsDimension,
            mdsPrecision,
            strains,
            locationsParameter,
            datesParameter,
            columnEffectsParameter,
            rowEffectsParameter,
            assayTable,
            null);

    Logger.getLogger("dr.evomodel").info("Using EvolutionaryCartography model. Please cite:\n" + Utils.getCitationString(AGL));
    return AGL;
}
|
package dr.evomodel.operators;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evomodel.tree.TreeModel;
import dr.evomodelxml.operators.SubtreeJumpOperatorParser;
import dr.evomodelxml.operators.SubtreeSlideOperatorParser;
import dr.inference.operators.CoercableMCMCOperator;
import dr.inference.operators.CoercionMode;
import dr.inference.operators.OperatorFailedException;
import dr.inference.operators.OperatorUtils;
import dr.math.MathUtils;
import java.util.ArrayList;
import java.util.List;
/**
* Implements the Subtree Jump move.
*
* @author Andrew Rambaut
* @version $Id$
*/
/**
 * Implements the Subtree Jump move: detaches a subtree and reattaches it to a
 * uniformly chosen edge that crosses the same height, so all node heights are
 * preserved and only the topology changes.
 */
public class SubtreeJumpOperator extends AbstractTreeOperator /* implements CoercableMCMCOperator */ { // not coercable at the moment.

    private TreeModel tree = null;

    /**
     * @param tree   the tree to operate on
     * @param weight the operator weight
     */
    public SubtreeJumpOperator(TreeModel tree, double weight) {
        this.tree = tree;
        setWeight(weight);
    }

    /**
     * Do a subtree jump move.
     *
     * @return the log-transformed hastings ratio
     * @throws OperatorFailedException if no destination edge crosses the
     *                                 attachment height (the move is rejected)
     */
    public double doOperation() throws OperatorFailedException {
        double logq;
        final NodeRef root = tree.getRoot();

        NodeRef i;
        // 1. choose a random node avoiding root or child of root
        do {
            i = tree.getNode(MathUtils.nextInt(tree.getNodeCount()));
        } while (root == i || tree.getParent(i) == root);

        final NodeRef iP = tree.getParent(i);
        final NodeRef CiP = getOtherChild(tree, iP, i);
        final NodeRef PiP = tree.getParent(iP);

        // get the height of the parent
        final double height = tree.getNodeHeight(iP);

        // get a list of all edges that intersect this height
        final List<NodeRef> destinations = getIntersectingEdges(tree, height);

        // remove the target node and its sibling (shouldn't be there because their parent's height is exactly equal to the target height).
        destinations.remove(i);
        destinations.remove(CiP);

        // BUG FIX: if no other edge crosses this height the move is impossible;
        // MathUtils.nextInt(0) would otherwise throw an unchecked exception and
        // abort the chain, so reject the proposal via the operator framework.
        if (destinations.isEmpty()) {
            throw new OperatorFailedException("No valid destination edges at this height");
        }

        // pick uniformly from this list
        final NodeRef j = destinations.get(MathUtils.nextInt(destinations.size()));
        final NodeRef jP = tree.getParent(j);

        tree.beginTreeEdit();

        // remove the parent of i by connecting its sibling to its grandparent.
        tree.removeChild(iP, CiP);
        tree.removeChild(PiP, iP);
        tree.addChild(PiP, CiP);

        // remove destination edge j from its parent
        tree.removeChild(jP, j);

        // add destination edge to the parent of i
        tree.addChild(iP, j);

        // and add the parent of i as a child of the former parent of j
        tree.addChild(jP, iP);

        tree.endTreeEdit();

        // NOTE(review): logq = 0 assumes the forward and backward destination
        // counts are equal — confirm this move is symmetric before relying on it.
        logq = 0.0;
        return logq;
    }

    /**
     * Collects all edges (identified by their lower node) whose parent is
     * strictly above and whose child is strictly below the given height.
     */
    private List<NodeRef> getIntersectingEdges(Tree tree, double height) {
        List<NodeRef> intersectingEdges = new ArrayList<NodeRef>();
        for (int i = 0; i < tree.getNodeCount(); i++) {
            final NodeRef node = tree.getNode(i);
            final NodeRef parent = tree.getParent(node);
            if (parent != null && tree.getNodeHeight(node) < height && tree.getNodeHeight(parent) > height) {
                intersectingEdges.add(node);
            }
        }
        return intersectingEdges;
    }

    public double getTargetAcceptanceProbability() {
        return 0.234;
    }

    public String getPerformanceSuggestion() {
        // This operator has no tunable size, so no suggestion is offered.
        return "";
    }

    public String getOperatorName() {
        return SubtreeJumpOperatorParser.SUBTREE_JUMP + "(" + tree.getId() + ")";
    }
}
|
package dr.evomodel.treelikelihood;
import dr.evolution.alignment.AscertainedSitePatterns;
import dr.evolution.alignment.PatternList;
import dr.evolution.alignment.SitePatterns;
import dr.evolution.datatype.DataType;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evolution.util.TaxonList;
import dr.evomodel.branchratemodel.BranchRateModel;
import dr.evomodel.branchratemodel.DefaultBranchRateModel;
import dr.evomodel.sitemodel.SiteModel;
import dr.evomodel.substmodel.FrequencyModel;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.Likelihood;
import dr.inference.model.Model;
import dr.inference.model.Statistic;
import dr.xml.*;
import java.util.logging.Logger;
/**
* TreeLikelihoodModel - implements a Likelihood Function for sequences on a tree.
*
* @author Andrew Rambaut
* @author Alexei Drummond
* @version $Id: TreeLikelihood.java,v 1.31 2006/08/30 16:02:42 rambaut Exp $
*/
public class TreeLikelihood extends AbstractTreeLikelihood {
public static final String TREE_LIKELIHOOD = "treeLikelihood";
public static final String USE_AMBIGUITIES = "useAmbiguities";
public static final String ALLOW_MISSING_TAXA = "allowMissingTaxa";
public static final String STORE_PARTIALS = "storePartials";
public static final String SCALING_FACTOR = "scalingFactor";
public static final String SCALING_THRESHOLD = "scalingThreshold";
public static final String FORCE_JAVA_CORE = "forceJavaCore";
public static final String FORCE_RESCALING = "forceRescaling";
/**
 * Constructor. Registers the site, frequency, branch-rate and (optional) tip
 * partials models, selects the fastest available likelihood core for the
 * data type, and loads tip states/partials from the pattern list.
 */
public TreeLikelihood(PatternList patternList,
                      TreeModel treeModel,
                      SiteModel siteModel,
                      BranchRateModel branchRateModel,
                      TipPartialsModel tipPartialsModel,
                      boolean useAmbiguities,
                      boolean allowMissingTaxa,
                      boolean storePartials,
                      boolean forceJavaCore,
                      boolean forceRescaling) {
    super(TREE_LIKELIHOOD, patternList, treeModel);
    this.storePartials = storePartials;
    try {
        this.siteModel = siteModel;
        addModel(siteModel);
        this.frequencyModel = siteModel.getFrequencyModel();
        addModel(frequencyModel);
        this.tipPartialsModel = tipPartialsModel;
        integrateAcrossCategories = siteModel.integrateAcrossCategories();
        this.categoryCount = siteModel.getCategoryCount();
        final Logger logger = Logger.getLogger("dr.evomodel");
        String coreName = "Java general";
        // Core selection: prefer a native core for the data type when
        // available and not overridden by forceJavaCore.
        if (integrateAcrossCategories) {
            final DataType dataType = patternList.getDataType();
            if (dataType instanceof dr.evolution.datatype.Nucleotides) {
                if (!forceJavaCore && NativeNucleotideLikelihoodCore.isAvailable()) {
                    coreName = "native nucleotide";
                    likelihoodCore = new NativeNucleotideLikelihoodCore();
                } else {
                    coreName = "Java nucleotide";
                    likelihoodCore = new NucleotideLikelihoodCore();
                }
            } else if (dataType instanceof dr.evolution.datatype.AminoAcids) {
                if (!forceJavaCore && NativeAminoAcidLikelihoodCore.isAvailable()) {
                    coreName = "native amino acid";
                    likelihoodCore = new NativeAminoAcidLikelihoodCore();
                } else {
                    coreName = "Java amino acid";
                    likelihoodCore = new AminoAcidLikelihoodCore();
                }
            } else if (dataType instanceof dr.evolution.datatype.Codons) {
                // The codon core was out of date and did nothing more than the general core...
                likelihoodCore = new GeneralLikelihoodCore(patternList.getStateCount());
                // Codon data always uses ambiguities (the flag is forced on here).
                useAmbiguities = true;
            } else {
                if (!forceJavaCore && NativeGeneralLikelihoodCore.isAvailable()) {
                    coreName = "native general";
                    likelihoodCore = new NativeGeneralLikelihoodCore(patternList.getStateCount());
                } else {
                    coreName = "Java general";
                    likelihoodCore = new GeneralLikelihoodCore(patternList.getStateCount());
                }
            }
        } else {
            // Per-site categories require the general core.
            likelihoodCore = new GeneralLikelihoodCore(patternList.getStateCount());
        }
        {
            final String id = getId();
            logger.info("TreeLikelihood(" + ((id != null) ? id : treeModel.getId()) + ") using " + coreName + " likelihood core");
            logger.info("  " + (useAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
            logger.info("  With " + patternList.getPatternCount() + " unique site patterns.");
        }
        if (branchRateModel != null) {
            this.branchRateModel = branchRateModel;
            logger.info("Branch rate model used: " + branchRateModel.getModelName());
        } else {
            this.branchRateModel = new DefaultBranchRateModel();
        }
        addModel(this.branchRateModel);
        probabilities = new double[stateCount * stateCount];
        likelihoodCore.initialize(nodeCount, patternCount, categoryCount, integrateAcrossCategories);
        int extNodeCount = treeModel.getExternalNodeCount();
        int intNodeCount = treeModel.getInternalNodeCount();
        if (tipPartialsModel != null) {
            // Tip partials come from the tip partials model (e.g. sequence error models).
            tipPartialsModel.setTree(treeModel);
            tipPartials = new double[patternCount * stateCount];
            for (int i = 0; i < extNodeCount; i++) {
                // Find the id of tip i in the patternList
                String id = treeModel.getTaxonId(i);
                int index = patternList.getTaxonIndex(id);
                if (index == -1) {
                    throw new TaxonList.MissingTaxonException("Taxon, " + id + ", in tree, " + treeModel.getId() +
                            ", is not found in patternList, " + patternList.getId());
                }
                tipPartialsModel.setStates(patternList, index, i, id);
                likelihoodCore.createNodePartials(i);
            }
            addModel(tipPartialsModel);
        } else {
            // Load tip data directly: partials for ambiguous states, or plain
            // state indices otherwise; missing taxa get uniform partials/states.
            for (int i = 0; i < extNodeCount; i++) {
                // Find the id of tip i in the patternList
                String id = treeModel.getTaxonId(i);
                int index = patternList.getTaxonIndex(id);
                if (index == -1) {
                    if (!allowMissingTaxa) {
                        throw new TaxonList.MissingTaxonException("Taxon, " + id + ", in tree, " + treeModel.getId() +
                                ", is not found in patternList, " + patternList.getId());
                    }
                    if (useAmbiguities) {
                        setMissingPartials(likelihoodCore, i);
                    } else {
                        setMissingStates(likelihoodCore, i);
                    }
                } else {
                    if (useAmbiguities) {
                        setPartials(likelihoodCore, patternList, categoryCount, index, i);
                    } else {
                        setStates(likelihoodCore, patternList, index, i);
                    }
                }
            }
        }
        for (int i = 0; i < intNodeCount; i++) {
            likelihoodCore.createNodePartials(extNodeCount + i);
        }
        if (forceRescaling) {
            likelihoodCore.setUseScaling(true);
            logger.info("  Forcing use of partials rescaling.");
        }
    } catch (TaxonList.MissingTaxonException mte) {
        throw new RuntimeException(mte.toString());
    }
    addStatistic(new SiteLikelihoodsStatistic());
}
/** Returns the likelihood core selected in the constructor. */
public final LikelihoodCore getLikelihoodCore() {
    return likelihoodCore;
}
// ModelListener IMPLEMENTATION

/**
 * Handles model changed events from the submodels, flagging the minimal set
 * of tree nodes whose partials must be recomputed.
 */
protected void handleModelChangedEvent(Model model, Object object, int index) {
    if (model == treeModel) {
        if (object instanceof TreeModel.TreeChangedEvent) {
            if (((TreeModel.TreeChangedEvent) object).isNodeChanged()) {
                // If a node event occurs the node and its two child nodes
                // are flagged for updating (this will result in everything
                // above being updated as well. Node events occur when a node
                // is added to a branch, removed from a branch or its height or
                // rate changes.
                updateNodeAndChildren(((TreeModel.TreeChangedEvent) object).getNode());
            } else if (((TreeModel.TreeChangedEvent) object).isTreeChanged()) {
                // Full tree events result in a complete updating of the tree likelihood
                updateAllNodes();
            } else {
                // Other event types are ignored (probably trait changes).
                //System.err.println("Another tree event has occured (possibly a trait change).");
            }
        }
    } else if (model == branchRateModel) {
        // index identifies the single branch whose rate changed; -1 means all.
        if (index == -1) {
            updateAllNodes();
        } else {
            updateNode(treeModel.getNode(index));
        }
    } else if (model == frequencyModel) {
        updateAllNodes();
    } else if (model == tipPartialsModel) {
        updateAllNodes();
    } else if (model instanceof SiteModel) {
        updateAllNodes();
    } else {
        throw new RuntimeException("Unknown componentChangedEvent");
    }
    super.handleModelChangedEvent(model, object, index);
}
// Model IMPLEMENTATION

/**
 * Stores the additional state other than model components
 * (the likelihood core's partials, when storePartials is enabled).
 */
protected void storeState() {
    if (storePartials) {
        likelihoodCore.storeState();
    }
    super.storeState();
}
/**
 * Restore the additional stored state. If partials were not stored, every
 * node must be flagged for recomputation instead.
 */
protected void restoreState() {
    if (storePartials) {
        likelihoodCore.restoreState();
    } else {
        updateAllNodes();
    }
    super.restoreState();
}
// Likelihood IMPLEMENTATION
/**
* Calculate the log likelihood of the current state.
*
* @return the log likelihood.
*/
protected double calculateLogLikelihood() {
if (patternLogLikelihoods == null) {
patternLogLikelihoods = new double[patternCount];
}
if (!integrateAcrossCategories) {
if (siteCategories == null) {
siteCategories = new int[patternCount];
}
for (int i = 0; i < patternCount; i++) {
siteCategories[i] = siteModel.getCategoryOfSite(i);
}
}
if (tipPartialsModel != null) {
int extNodeCount = treeModel.getExternalNodeCount();
for (int index = 0; index < extNodeCount; index++) {
if (updateNode[index]) {
likelihoodCore.setNodePartialsForUpdate(index);
tipPartialsModel.getTipPartials(index, tipPartials);
likelihoodCore.setCurrentNodePartials(index, tipPartials);
}
}
}
final NodeRef root = treeModel.getRoot();
traverse(treeModel, root);
double logL = 0.0;
double ascertainmentCorrection = getAscertainmentCorrection(patternLogLikelihoods);
for (int i = 0; i < patternCount; i++) {
logL += (patternLogLikelihoods[i] - ascertainmentCorrection) * patternWeights[i];
}
if (logL == Double.NEGATIVE_INFINITY) {
Logger.getLogger("dr.evomodel").info("TreeLikelihood, " + this.getId() + ", turning on partial likelihood scaling to avoid precision loss");
// We probably had an underflow... turn on scaling
likelihoodCore.setUseScaling(true);
// and try again...
updateAllNodes();
updateAllPatterns();
traverse(treeModel, root);
logL = 0.0;
ascertainmentCorrection = getAscertainmentCorrection(patternLogLikelihoods);
for (int i = 0; i < patternCount; i++) {
logL += (patternLogLikelihoods[i] - ascertainmentCorrection) * patternWeights[i];
}
}
/**
 * Calculate the ascertainment correction when the pattern list is an
 * AscertainedSitePatterns; otherwise returns log(1) = 0.
 *
 * @param patternProbs log pattern probabilities
 * @return the log total probability for a pattern.
 */
protected double getAscertainmentCorrection(double[] patternProbs) {
    // This function probably belongs better to the AscertainedSitePatterns
    double returnProb = 1.0;
    if (patternList instanceof AscertainedSitePatterns) {
        final AscertainedSitePatterns ascertained = (AscertainedSitePatterns) patternList;
        final int[] includeIndices = ascertained.getIncludePatternIndices();
        final int[] excludeIndices = ascertained.getExcludePatternIndices();
        double includeProb = 0.0;
        for (int i = 0; i < ascertained.getIncludePatternCount(); i++) {
            includeProb += Math.exp(patternProbs[includeIndices[i]]);
        }
        double excludeProb = 0.0;
        for (int j = 0; j < ascertained.getExcludePatternCount(); j++) {
            excludeProb += Math.exp(patternProbs[excludeIndices[j]]);
        }
        if (includeProb == 0.0) {
            returnProb -= excludeProb;
        } else if (excludeProb == 0.0) {
            returnProb = includeProb;
        } else {
            returnProb = includeProb - excludeProb;
        }
    }
    return Math.log(returnProb);
}
/**
 * Check whether the scaling is still required. If the sum of all the logScalingFactors
 * is zero then we simply turn off the useScaling flag. This will speed up the likelihood
 * calculations when scaling is not required.
 */
public void checkScaling() {
    // NOTE(review): the implementation below is disabled (and appears to have
    // lost lines when it was commented out), so this method is currently a
    // no-op — confirm whether it should be restored or removed.
//        if (useScaling) {
//            if (scalingCheckCount % 1000 == 0) {
//                double totalScalingFactor = 0.0;
//                for (int i = 0; i < nodeCount; i++) {
//                    for (int j = 0; j < patternCount; j++) {
//                        totalScalingFactor += scalingFactors[currentPartialsIndices[i]][i][j];
//                useScaling = totalScalingFactor < 0.0;
//                Logger.getLogger("dr.evomodel").info("LikelihoodCore total log scaling factor: " + totalScalingFactor);
//                if (!useScaling) {
//                    Logger.getLogger("dr.evomodel").info("LikelihoodCore scaling turned off.");
//        scalingCheckCount++;
}
/**
 * Traverse the tree calculating partial likelihoods.
 *
 * @param tree the tree being traversed
 * @param node the current (sub)tree root
 * @return whether the partials for this node were recalculated.
 */
protected boolean traverse(Tree tree, NodeRef node) {
    boolean update = false;
    int nodeNum = node.getNumber();
    NodeRef parent = tree.getParent(node);
    // First update the transition probability matrix(ices) for this branch
    if (parent != null && updateNode[nodeNum]) {
        final double branchRate = branchRateModel.getBranchRate(tree, node);
        // Get the operational time of the branch
        final double branchTime = branchRate * (tree.getNodeHeight(parent) - tree.getNodeHeight(node));
        if (branchTime < 0.0) {
            throw new RuntimeException("Negative branch length: " + branchTime);
        }
        likelihoodCore.setNodeMatrixForUpdate(nodeNum);
        // One transition matrix per rate category.
        for (int i = 0; i < categoryCount; i++) {
            double branchLength = siteModel.getRateForCategory(i) * branchTime;
            siteModel.getSubstitutionModel().getTransitionProbabilities(branchLength, probabilities);
            likelihoodCore.setNodeMatrix(nodeNum, i, probabilities);
        }
        update = true;
    }
    // If the node is internal, update the partial likelihoods.
    if (!tree.isExternal(node)) {
        // Traverse down the two child nodes
        NodeRef child1 = tree.getChild(node, 0);
        final boolean update1 = traverse(tree, child1);
        NodeRef child2 = tree.getChild(node, 1);
        final boolean update2 = traverse(tree, child2);
        // If either child node was updated then update this node too
        if (update1 || update2) {
            final int childNum1 = child1.getNumber();
            final int childNum2 = child2.getNumber();
            likelihoodCore.setNodePartialsForUpdate(nodeNum);
            if (integrateAcrossCategories) {
                likelihoodCore.calculatePartials(childNum1, childNum2, nodeNum);
            } else {
                likelihoodCore.calculatePartials(childNum1, childNum2, nodeNum, siteCategories);
            }
            if (COUNT_TOTAL_OPERATIONS) {
                totalOperationCount ++;
            }
            if (parent == null) {
                // No parent this is the root of the tree -
                // calculate the pattern likelihoods
                double[] frequencies = frequencyModel.getFrequencies();
                double[] partials = getRootPartials();
                likelihoodCore.calculateLogLikelihoods(partials, frequencies, patternLogLikelihoods);
            }
            update = true;
        }
    }
    return update;
}
/**
 * Fetches the root partials into a lazily-allocated scratch buffer,
 * integrating over rate categories when the site model requires it.
 */
public final double[] getRootPartials() {
    if (rootPartials == null) {
        rootPartials = new double[patternCount * stateCount];
    }
    final int rootNodeNum = treeModel.getRoot().getNumber();
    if (!integrateAcrossCategories) {
        likelihoodCore.getPartials(rootNodeNum, rootPartials);
    } else {
        // moved this call to here, because non-integrating siteModels don't need to support it - AD
        final double[] proportions = siteModel.getCategoryProportions();
        likelihoodCore.integratePartials(rootNodeNum, proportions, rootPartials);
    }
    return rootPartials;
}
/**
* the root partial likelihoods (a temporary array that is used
* to fetch the partials - it should not be examined directly -
* use getRootPartials() instead).
*/
private double[] rootPartials = null;
/**
 * Exposes the per-site (or per-pattern) log likelihoods as a {@link Statistic}.
 */
public class SiteLikelihoodsStatistic extends Statistic.Abstract {
public SiteLikelihoodsStatistic() {
super("siteLikelihoods");
}
// One dimension per alignment site when the pattern list carries a
// site-to-pattern mapping, otherwise one per unique pattern.
public int getDimension() {
if (patternList instanceof SitePatterns) {
return ((SitePatterns)patternList).getSiteCount();
} else {
return patternList.getPatternCount();
}
}
public String getDimensionName(int dim) {
return getTreeModel().getId() + "site-" + dim;
}
// Returns the stored log likelihood for site i, normalised by the weight
// of the pattern it maps to.
public double getStatisticValue(int i) {
if (patternList instanceof SitePatterns) {
int index = ((SitePatterns)patternList).getPatternIndex(i);
if( index >= 0 ) {
return patternLogLikelihoods[index] / patternWeights[index];
} else {
// site has no corresponding stored pattern
return 0.0;
}
} else {
// NOTE(review): this branch returns the pattern *count*, not a
// likelihood value -- looks suspicious; confirm the intended fallback.
return patternList.getPatternCount();
}
}
}
/**
 * The XML parser that instantiates a {@link TreeLikelihood} from its
 * XML configuration element.
 */
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return TREE_LIKELIHOOD;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
// optional boolean attributes with their defaults
boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
boolean allowMissingTaxa = xo.getAttribute(ALLOW_MISSING_TAXA, false);
boolean storePartials = xo.getAttribute(STORE_PARTIALS, true);
boolean forceJavaCore = xo.getAttribute(FORCE_JAVA_CORE, false);
// the "java_only" system property globally forces the pure-Java likelihood core
if (Boolean.valueOf(System.getProperty("java_only"))) {
forceJavaCore = true;
}
// child model elements (getChild returns null when a child is absent)
PatternList patternList = (PatternList) xo.getChild(PatternList.class);
TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
SiteModel siteModel = (SiteModel) xo.getChild(SiteModel.class);
BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
TipPartialsModel tipPartialsModel = (TipPartialsModel) xo.getChild(TipPartialsModel.class);
boolean forceRescaling = xo.getAttribute(FORCE_RESCALING,false);
return new TreeLikelihood(
patternList,
treeModel,
siteModel,
branchRateModel,
tipPartialsModel,
useAmbiguities, allowMissingTaxa, storePartials, forceJavaCore, forceRescaling);
}
/**
* the frequency model for these sites
*/
protected final FrequencyModel frequencyModel;
/**
* the site model for these sites
*/
protected final SiteModel siteModel;
/**
* the branch rate model
*/
protected final BranchRateModel branchRateModel;
/**
* the tip partials model
*/
private final TipPartialsModel tipPartialsModel;
private final boolean storePartials;
protected final boolean integrateAcrossCategories;
/**
* the categories for each site
*/
protected int[] siteCategories = null;
/**
* the pattern likelihoods
*/
protected double[] patternLogLikelihoods = null;
/**
* the number of rate categories
*/
protected int categoryCount;
/**
* an array used to transfer transition probabilities
*/
protected double[] probabilities;
/**
* an array used to transfer tip partials
*/
protected double[] tipPartials;
/**
* the LikelihoodCore
*/
protected LikelihoodCore likelihoodCore;
}
|
/*
* Gameboi
*/
package gameboi;
import java.nio.file.Path;
import java.io.File;
/**
*
* @author tomis007
*/
/**
 * Entry point for the Gameboi emulator: loads a ROM, then runs the
 * fetch/decode/execute loop, pacing each frame to real Game Boy speed.
 *
 * @author tomis007
 */
public class GameBoi {

    /** Machine cycles executed per emulated frame (as used by the main loop). */
    private static final int CYCLES_PER_FRAME = 69905;

    /** Wall-clock budget of one frame, in nanoseconds (~16.7 ms, ~60 fps). */
    private static final long FRAME_NANOS = 16700000L;

    /**
     * Runs the emulator. Never returns normally; exits only if the thread
     * is interrupted or the user declines to select a ROM.
     *
     * @param argv the command line arguments (unused)
     */
    public static void main(String[] argv) {
        GBMem memory = new GBMem(loadRom());
        CPU z80 = new CPU(memory);
        GPU gpu = new GPU(memory, z80);
        // Start the Gameboy fetch, decode, execute cycle
        while (true) {
            int count = 0;
            long startTime = System.nanoTime();
            // Run one frame's worth of CPU cycles, feeding the GPU as we go.
            while (count < CYCLES_PER_FRAME) {
                int cycles = z80.ExecuteOpcode();
                gpu.updateGraphics(cycles);
                count += cycles;
            }
            // Sleep off the remainder of the frame budget to pace emulation.
            long sleepTime = FRAME_NANOS - (System.nanoTime() - startTime);
            if (sleepTime > 0) {
                try {
                    // FIX: take the modulo before casting; the original cast the
                    // whole value to int first ((int)sleepTime % 1000000).
                    Thread.sleep(sleepTime / 1000000, (int) (sleepTime % 1000000));
                } catch (InterruptedException e) {
                    // FIX: restore the interrupt flag instead of swallowing it,
                    // and stop the emulation loop so the JVM can shut down.
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }

    /**
     * Gets a Path object to a ROM with a simple GUI file chooser.
     *
     * @return Path to the selected ROM; terminates the JVM if none is chosen
     */
    private static Path loadRom() {
        FileSelector fc = new FileSelector(System.getProperty("user.dir"));
        File rom = fc.selectFile();
        if (rom == null) {
            System.err.println("Sorry, please select a ROM");
            System.exit(1);
        }
        return rom.toPath();
    }
}
|
package sort;
import org.apache.log4j.Logger;
import org.apache.log4j.NDC;
/**
   Example code for log4j to be viewed in conjunction with the {@link
   examples.Sort Sort} class.

   <p>SortAlgo uses the bubble sort algorithm to sort an integer
   array. See also its <b><a href="doc-files/SortAlgo.java">source
   code</a></b>.

   @author Ceki G&uuml;lc&uuml; */
public class SortAlgo {
final static String className = SortAlgo.class.getName();
final static Logger CAT = Logger.getLogger(className);
final static Logger OUTER = Logger.getLogger(className + ".OUTER");
final static Logger INNER = Logger.getLogger(className + ".INNER");
final static Logger DUMP = Logger.getLogger(className + ".DUMP");
final static Logger SWAP = Logger.getLogger(className + ".SWAP");
int[] intArray;
SortAlgo(int[] intArray) {
this.intArray = intArray;
}
void bubbleSort() {
CAT.info( "Entered the sort method.");
for(int i = intArray.length -1; i >= 0 ; i
NDC.push("i=" + i);
OUTER.debug("in outer loop.");
for(int j = 0; j < i; j++) {
NDC.push("j=" + j);
// It is poor practice to ship code with log staments in tight loops.
// We do it anyway in this example.
INNER.debug( "in inner loop.");
if(intArray[j] > intArray[j+1])
swap(j, j+1);
NDC.pop();
}
NDC.pop();
}
}
void dump() {
if(! (this.intArray instanceof int[])) {
DUMP.error("Tried to dump an uninitialized array.");
return;
}
DUMP.info("Dump of integer array:");
for(int i = 0; i < this.intArray.length; i++) {
DUMP.info("Element [" + i + "]=" + this.intArray[i]);
}
}
void swap(int l, int r) {
// It is poor practice to ship code with log staments in tight
// loops or code called potentially millions of times.
SWAP.debug( "Swapping intArray["+l+"]=" + intArray[l] +
" and intArray["+r+"]=" + intArray[r]);
int temp = this.intArray[l];
this.intArray[l] = this.intArray[r];
this.intArray[r] = temp;
}
}
|
package hudson.maven;
import hudson.AbortException;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.EnvVars;
import hudson.scm.ChangeLogSet;
import hudson.FilePath.FileCallable;
import hudson.maven.MavenBuild.ProxyImpl2;
import hudson.maven.reporters.MavenFingerprinter;
import hudson.maven.reporters.MavenMailer;
import hudson.model.AbstractProject;
import hudson.model.Action;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Environment;
import hudson.model.Fingerprint;
import hudson.model.Hudson;
import hudson.model.ParametersAction;
import hudson.model.Result;
import hudson.model.Computer;
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.model.Cause.UpstreamCause;
import hudson.remoting.Channel;
import hudson.remoting.VirtualChannel;
import hudson.tasks.BuildWrapper;
import hudson.tasks.MailSender;
import hudson.tasks.Maven.MavenInstallation;
import hudson.util.ArgumentListBuilder;
import hudson.util.StreamTaskListener;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Serializable;
import java.io.InterruptedIOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.maven.BuildFailureException;
import org.apache.maven.embedder.MavenEmbedderException;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.execution.ReactorManager;
import org.apache.maven.lifecycle.LifecycleExecutionException;
import org.apache.maven.monitor.event.EventDispatcher;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.ProjectBuildingException;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import static hudson.model.Result.FAILURE;
/**
* {@link Build} for {@link MavenModuleSet}.
*
* <p>
* A "build" of {@link MavenModuleSet} consists of:
*
* <ol>
* <li>Update the workspace.
* <li>Parse POMs
* <li>Trigger module builds.
* </ol>
*
* This object remembers the changelog and what {@link MavenBuild}s are done
* on this.
*
* @author Kohsuke Kawaguchi
*/
public class MavenModuleSetBuild extends AbstractMavenBuild<MavenModuleSet,MavenModuleSetBuild> {
/**
* {@link MavenReporter}s that will contribute project actions.
* Can be null if there's none.
*/
/*package*/ List<MavenReporter> projectActionReporters;
/**
 * Creates a new build record for the given module set, assigning the next
 * build number.
 */
public MavenModuleSetBuild(MavenModuleSet job) throws IOException {
super(job);
}
/**
 * Loads an existing build record from the given build directory on disk.
 */
public MavenModuleSetBuild(MavenModuleSet project, File buildDir) throws IOException {
super(project, buildDir);
}
/**
* Exposes {@code MAVEN_OPTS} to forked processes.
*
* When we fork Maven, we do so directly by executing Java, thus this environment variable
* is pointless (we have to tweak JVM launch option correctly instead, which can be seen in
* {@link MavenProcessFactory}), but setting the environment variable explicitly is still
* useful in case this Maven forks other Maven processes via normal way. See HUDSON-3644.
*/
@Override
public EnvVars getEnvironment(TaskListener log) throws IOException, InterruptedException {
    // Start from the base environment, then expose the project's Maven
    // options so that any Maven processes forked the normal way pick them up.
    final EnvVars environment = super.getEnvironment(log);
    final String mavenOpts = project.getMavenOpts();
    if (mavenOpts != null) {
        environment.put("MAVEN_OPTS", mavenOpts);
    }
    return environment;
}
/**
* Displays the combined status of all modules.
* <p>
* More precisely, this picks up the status of this build itself,
* plus all the latest builds of the modules that belongs to this build.
*/
@Override
public Result getResult() {
    // Combine this build's own result with the latest result of every module.
    Result combined = super.getResult();
    for (MavenBuild moduleBuild : getModuleLastBuilds().values()) {
        final Result moduleResult = moduleBuild.getResult();
        if (combined == null) {
            combined = moduleResult;
            continue;
        }
        // UGLY: when computing combined status, ignore the modules that were
        // not built (and those with no result yet).
        if (moduleResult == null || moduleResult == Result.NOT_BUILT) {
            continue;
        }
        combined = combined.combine(moduleResult);
    }
    return combined;
}
/**
 * Returns the filtered changeset entries that match the given module.
 * An entry qualifies when it touches the module (or a descendant) and at
 * least one of its paths lies in the module proper rather than in a
 * subsidiary module.
 */
/*package*/ List<ChangeLogSet.Entry> getChangeSetFor(final MavenModule mod) {
// anonymous ArrayList populated eagerly by its instance initializer
return new ArrayList<ChangeLogSet.Entry>() {
{
// modules that are under 'mod'. lazily computed
List<MavenModule> subsidiaries = null;
for (ChangeLogSet.Entry e : getChangeSet()) {
if(isDescendantOf(e, mod)) {
if(subsidiaries==null)
subsidiaries = mod.getSubsidiaries();
// make sure at least one change belongs to this module proper,
// and not its subsidiary module
if (notInSubsidiary(subsidiaries, e))
add(e);
}
}
}
// True if any affected path of the entry is outside every subsidiary module.
private boolean notInSubsidiary(List<MavenModule> subsidiaries, ChangeLogSet.Entry e) {
for (String path : e.getAffectedPaths())
if(!belongsToSubsidiary(subsidiaries, path))
return true;
return false;
}
// True if the path lies under the relative path of some subsidiary module.
private boolean belongsToSubsidiary(List<MavenModule> subsidiaries, String path) {
for (MavenModule sub : subsidiaries)
if(path.startsWith(sub.getRelativePath()))
return true;
return false;
}
/**
 * Does this change happen somewhere in the given module or its descendants?
 */
private boolean isDescendantOf(ChangeLogSet.Entry e, MavenModule mod) {
for (String path : e.getAffectedPaths())
if(path.startsWith(mod.getRelativePath()))
return true;
return false;
}
};
}
/**
 * Computes the module builds that correspond to this build.
 * <p>
 * A module may be built multiple times (by the user action),
 * so the value is a list.
 */
public Map<MavenModule,List<MavenBuild>> getModuleBuilds() {
    final Collection<MavenModule> modules = getParent().getModules();

    // Module builds belonging to this module-set build occupy the build number
    // range [this.number, nextBuild.number) -- open-ended if there is no next build.
    final MavenModuleSetBuild next = getNextBuild();
    final int endExclusive = (next == null) ? Integer.MAX_VALUE : next.getNumber();

    // LinkedHashMap preserves the module ordering.
    final Map<MavenModule,List<MavenBuild>> buildsByModule =
            new LinkedHashMap<MavenModule,List<MavenBuild>>(modules.size());
    for (MavenModule module : modules) {
        final List<MavenBuild> inRange = new ArrayList<MavenBuild>();
        for (MavenBuild b = module.getNearestBuild(number);
             b != null && b.getNumber() < endExclusive;
             b = b.getNextBuild()) {
            inRange.add(b);
        }
        buildsByModule.put(module, inRange);
    }
    return buildsByModule;
}
@Override
public synchronized void delete() throws IOException {
    super.delete();
    // Cascade the delete to every module build contained in this build.
    for (Map.Entry<MavenModule, List<MavenBuild>> entry : getModuleBuilds().entrySet()) {
        for (MavenBuild moduleBuild : entry.getValue()) {
            moduleBuild.delete();
        }
    }
}
@Override
public Object getDynamic(String token, StaplerRequest req, StaplerResponse rsp) {
    // A URL token containing '$' (past position 0) names a module; map it to
    // the corresponding module build under this module-set build.
    if (token.indexOf('$') > 0) {
        MavenModule module = getProject().getModule(token);
        if (module != null) {
            return module.getBuildByNumber(getNumber());
        }
    }
    return super.getDynamic(token, req, rsp);
}
/**
 * Computes the latest module builds that correspond to this build.
 * (when individual modules are built, a new ModuleSetBuild is not created,
 * but rather the new module build falls under the previous ModuleSetBuild)
 */
public Map<MavenModule,MavenBuild> getModuleLastBuilds() {
Collection<MavenModule> mods = getParent().getModules();
// identify the build number range. [start,end)
MavenModuleSetBuild nb = getNextBuild();
int end = nb!=null ? nb.getNumber() : Integer.MAX_VALUE;
// preserve the order by using LinkedHashMap
Map<MavenModule,MavenBuild> r = new LinkedHashMap<MavenModule,MavenBuild>(mods.size());
for (MavenModule m : mods) {
// newest module build strictly before the next module-set build ...
MavenBuild b = m.getNearestOldBuild(end - 1);
// ... kept only if it is not older than this build
if(b!=null && b.getNumber()>=getNumber())
r.put(m,b);
}
return r;
}
/**
 * Registers a reporter whose project-level actions should be contributed
 * to the owning project. The backing list is created lazily.
 */
public void registerAsProjectAction(MavenReporter reporter) {
    if (projectActionReporters == null) {
        projectActionReporters = new ArrayList<MavenReporter>();
    }
    projectActionReporters.add(reporter);
}
/**
 * Finds {@link Action}s from all the module builds that belong to this
 * {@link MavenModuleSetBuild}. One action per one {@link MavenModule},
 * and newer ones take precedence over older ones.
 */
public <T extends Action> List<T> findModuleBuildActions(Class<T> action) {
Collection<MavenModule> mods = getParent().getModules();
List<T> r = new ArrayList<T>(mods.size());
// identify the build number range. [start,end)
MavenModuleSetBuild nb = getNextBuild();
int end = nb!=null ? nb.getNumber()-1 : Integer.MAX_VALUE;
for (MavenModule m : mods) {
// walk backwards from the newest in-range module build ...
MavenBuild b = m.getNearestOldBuild(end);
while(b!=null && b.getNumber()>=number) {
T a = b.getAction(action);
if(a!=null) {
// ... keeping only the newest matching action for this module
r.add(a);
break;
}
b = b.getPreviousBuild();
}
}
return r;
}
// Executes the build via RunnerImpl, then refreshes the project's
// transient actions, which may have been contributed during the build.
public void run() {
run(new RunnerImpl());
getProject().updateTransientActions();
}
@Override
public Fingerprint.RangeSet getDownstreamRelationship(AbstractProject that) {
    // Union this build's own relationship with the relationships
    // contributed by every contained module build.
    final Fingerprint.RangeSet range = super.getDownstreamRelationship(that);
    for (List<MavenBuild> moduleBuilds : getModuleBuilds().values()) {
        for (MavenBuild moduleBuild : moduleBuilds) {
            range.add(moduleBuild.getDownstreamRelationship(that));
        }
    }
    return range;
}
/**
 * Called when a module build that corresponds to this module set build
 * has completed. Refreshes aggregated actions from the module builds and
 * creates the file-system symlink to the new module build.
 */
/*package*/ void notifyModuleBuild(MavenBuild newBuild) {
try {
// update module set build number
getParent().updateNextBuildNumber();
// update actions
Map<MavenModule, List<MavenBuild>> moduleBuilds = getModuleBuilds();
// actions need to be replaced atomically especially
// given that two builds might complete simultaneously.
synchronized(this) {
boolean modified = false;
List<Action> actions = getActions();
Set<Class<? extends AggregatableAction>> individuals = new HashSet<Class<? extends AggregatableAction>>();
// refresh the aggregated reports we already have, remembering which
// individual action types they cover
for (Action a : actions) {
if(a instanceof MavenAggregatedReport) {
MavenAggregatedReport mar = (MavenAggregatedReport) a;
mar.update(moduleBuilds,newBuild);
individuals.add(mar.getIndividualActionType());
modified = true;
}
}
// see if the new build has any new aggregatable action that we haven't seen.
for (AggregatableAction aa : newBuild.getActions(AggregatableAction.class)) {
if(individuals.add(aa.getClass())) {
// new AggregatableAction
MavenAggregatedReport mar = aa.createAggregatedAction(this, moduleBuilds);
mar.update(moduleBuilds,newBuild);
actions.add(mar);
modified = true;
}
}
if(modified) {
save();
getProject().updateTransientActions();
}
}
// symlink to this module build
String moduleFsName = newBuild.getProject().getModuleName().toFileSystemName();
Util.createSymlink(getRootDir(),
"../../modules/"+ moduleFsName +"/builds/"+newBuild.getId() /*ugly!*/,
moduleFsName, StreamTaskListener.NULL);
} catch (IOException e) {
// best effort: a failed refresh must not fail the module build
LOGGER.log(Level.WARNING,"Failed to update "+this,e);
} catch (InterruptedException e) {
LOGGER.log(Level.WARNING,"Failed to update "+this,e);
}
}
/**
 * The sole job of the {@link MavenModuleSet} build is to update SCM
 * and trigger module builds.
 */
private class RunnerImpl extends AbstractRunner {
private Map<ModuleName,MavenBuild.ProxyImpl2> proxies;
protected Result doRun(final BuildListener listener) throws Exception {
PrintStream logger = listener.getLogger();
try {
EnvVars envVars = getEnvironment(listener);
MavenInstallation mvn = project.getMaven();
if(mvn==null)
throw new AbortException("A Maven installation needs to be available for this project to be built.\n"+
"Either your server has no Maven installations defined, or the requested Maven version does not exist.");
mvn = mvn.forEnvironment(envVars).forNode(Computer.currentComputer().getNode(), listener);
if(!project.isAggregatorStyleBuild()) {
parsePoms(listener, logger, envVars, mvn);
// start module builds
logger.println("Triggering "+project.getRootModule().getModuleName());
project.getRootModule().scheduleBuild(new UpstreamCause((Run<?,?>)MavenModuleSetBuild.this));
} else {
// do builds here
try {
List<BuildWrapper> wrappers = new ArrayList<BuildWrapper>();
for (BuildWrapper w : project.getBuildWrappersList())
wrappers.add(w);
ParametersAction parameters = getAction(ParametersAction.class);
if (parameters != null)
parameters.createBuildWrappers(MavenModuleSetBuild.this,wrappers);
for( BuildWrapper w : wrappers) {
Environment e = w.setUp(MavenModuleSetBuild.this, launcher, listener);
if(e==null)
return Result.FAILURE;
buildEnvironments.add(e);
e.buildEnvVars(envVars); // #3502: too late for getEnvironment to do this
}
if(!preBuild(listener, project.getPublishers()))
return Result.FAILURE;
parsePoms(listener, logger, envVars, mvn); // #5428 : do pre-build *before* parsing pom
SplittableBuildListener slistener = new SplittableBuildListener(listener);
proxies = new HashMap<ModuleName, ProxyImpl2>();
List<String> changedModules = new ArrayList<String>();
for (MavenModule m : project.sortedActiveModules) {
MavenBuild mb = m.newBuild();
// Check if incrementalBuild is selected and that there are changes -
// we act as if incrementalBuild is not set if there are no changes.
if (!MavenModuleSetBuild.this.getChangeSet().isEmptySet()
&& project.isIncrementalBuild()) {
// If there are changes for this module, add it.
// Also add it if we've never seen this module before,
// or if the previous build of this module failed or was unstable.
if ((mb.getPreviousBuiltBuild() == null) ||
(!getChangeSetFor(m).isEmpty())
|| (mb.getPreviousBuiltBuild().getResult().isWorseThan(Result.SUCCESS))) {
changedModules.add(m.getModuleName().toString());
}
}
mb.setWorkspace(getModuleRoot().child(m.getRelativePath()));
proxies.put(m.getModuleName(), mb.new ProxyImpl2(MavenModuleSetBuild.this,slistener));
}
// run the complete build here
// figure out the root POM location.
// choice of module root ('ws' in this method) is somewhat arbitrary
// when multiple CVS/SVN modules are checked out, so also check
// the path against the workspace root if that seems like what the user meant (see issue #1293)
String rootPOM = project.getRootPOM();
FilePath pom = getModuleRoot().child(rootPOM);
FilePath parentLoc = getWorkspace().child(rootPOM);
if(!pom.exists() && parentLoc.exists())
pom = parentLoc;
ProcessCache.MavenProcess process = MavenBuild.mavenProcessCache.get(launcher.getChannel(), slistener,
new MavenProcessFactory(project,launcher,envVars,pom.getParent()));
ArgumentListBuilder margs = new ArgumentListBuilder().add("-B").add("-f", pom.getRemote());
if(project.usesPrivateRepository())
margs.add("-Dmaven.repo.local="+getWorkspace().child(".repository"));
// If incrementalBuild is set, and we're on Maven 2.1 or later, *and* there's at least one module
// listed in changedModules, do the Maven incremental build commands - if there are no changed modules,
// We're building everything anyway.
if (project.isIncrementalBuild() && mvn.isMaven2_1(launcher) && !changedModules.isEmpty()) {
margs.add("-amd");
margs.add("-pl", Util.join(changedModules, ","));
}
if (project.getAlternateSettings() != null) {
margs.add("-s").add(getWorkspace().child(project.getAlternateSettings()));
}
margs.addTokenized(envVars.expand(project.getGoals()));
Builder builder = new Builder(slistener, proxies, project.sortedActiveModules, margs.toList(), envVars);
MavenProbeAction mpa=null;
try {
mpa = new MavenProbeAction(project,process.channel);
addAction(mpa);
return process.call(builder);
} finally {
builder.end(launcher);
getActions().remove(mpa);
process.discard();
}
} finally {
// tear down in reverse order
boolean failed=false;
for( int i=buildEnvironments.size()-1; i>=0; i
if (!buildEnvironments.get(i).tearDown(MavenModuleSetBuild.this,listener)) {
failed=true;
}
}
buildEnvironments = null;
// WARNING The return in the finally clause will trump any return before
if (failed) return Result.FAILURE;
}
}
return null;
} catch (AbortException e) {
if(e.getMessage()!=null)
listener.error(e.getMessage());
return Result.FAILURE;
} catch (InterruptedIOException e) {
e.printStackTrace(listener.error("Aborted Maven execution for InterruptedIOException"));
return Result.ABORTED;
} catch (InterruptedException e) {
e.printStackTrace(listener.error("Aborted Maven execution for InterruptedException"));
return Result.ABORTED;
} catch (IOException e) {
e.printStackTrace(listener.error(Messages.MavenModuleSetBuild_FailedToParsePom()));
return Result.FAILURE;
} catch (RunnerAbortedException e) {
return Result.FAILURE;
} catch (RuntimeException e) {
// bug in the code.
e.printStackTrace(listener.error("Processing failed due to a bug in the code. Please report this to users@hudson.dev.java.net"));
logger.println("project="+project);
logger.println("project.getModules()="+project.getModules());
logger.println("project.getRootModule()="+project.getRootModule());
throw e;
}
}
// Parses all POMs (remotely, via PomParser) and synchronises the project's
// module list with the modules discovered in the POMs: existing modules are
// reconfigured, new ones created, vanished ones disabled.
private void parsePoms(BuildListener listener, PrintStream logger, EnvVars envVars, MavenInstallation mvn) throws IOException, InterruptedException {
logger.println("Parsing POMs");
List<PomInfo> poms;
try {
poms = getModuleRoot().act(new PomParser(listener, mvn, project));
} catch (IOException e) {
// unwrap an AbortException tunnelled through remoting
if (e.getCause() instanceof AbortException)
throw (AbortException) e.getCause();
throw e;
} catch (MavenExecutionException e) {
// Maven failed to parse POM
e.getCause().printStackTrace(listener.error(Messages.MavenModuleSetBuild_FailedToParsePom()));
throw new AbortException();
}
// update the module list
Map<ModuleName,MavenModule> modules = project.modules;
synchronized(modules) {
Map<ModuleName,MavenModule> old = new HashMap<ModuleName, MavenModule>(modules);
List<MavenModule> sortedModules = new ArrayList<MavenModule>();
modules.clear();
if(debug)
logger.println("Root POM is "+poms.get(0).name);
project.reconfigure(poms.get(0));
for (PomInfo pom : poms) {
MavenModule mm = old.get(pom.name);
if(mm!=null) {// found an existing matching module
if(debug)
logger.println("Reconfiguring "+mm);
mm.reconfigure(pom);
modules.put(pom.name,mm);
} else {// this looks like a new module
logger.println(Messages.MavenModuleSetBuild_DiscoveredModule(pom.name,pom.displayName));
mm = new MavenModule(project,pom,getNumber());
modules.put(mm.getModuleName(),mm);
}
sortedModules.add(mm);
mm.save();
}
// at this point the list contains all the live modules
project.sortedActiveModules = sortedModules;
// remaining modules are no longer active.
old.keySet().removeAll(modules.keySet());
for (MavenModule om : old.values()) {
if(debug)
logger.println("Disabling "+om);
om.makeDisabled(true);
}
modules.putAll(old);
}
// we might have added new modules
Hudson.getInstance().rebuildDependencyGraph();
// module builds must start with this build's number
for (MavenModule m : modules.values())
m.updateNextBuildNumber(getNumber());
}
// Runs after the main build: persists module build state, executes
// publishers/properties build steps, and aggregates module fingerprints.
protected void post2(BuildListener listener) throws Exception {
// asynchronous executions from the build might have left some unsaved state,
// so just to be safe, save them all.
for (MavenBuild b : getModuleLastBuilds().values())
b.save();
// at this point the result is all set, so ignore the return value
if (!performAllBuildSteps(listener, project.getPublishers(), true))
setResult(FAILURE);
if (!performAllBuildSteps(listener, project.getProperties(), true))
setResult(FAILURE);
// aggregate all module fingerprints to us,
// so that dependencies between module builds can be understood as
// dependencies between module set builds.
// TODO: we really want to implement this as a publisher,
// but we don't want to ask for a user configuration, nor should it
// show up in the persisted record.
MavenFingerprinter.aggregate(MavenModuleSetBuild.this);
}
// Final phase: schedule downstream builds (aggregator mode only), send
// notification mail if configured, and run the non-result-affecting
// build steps.
@Override
public void cleanUp(BuildListener listener) throws Exception {
if(project.isAggregatorStyleBuild()) {
// schedule downstream builds. for non aggregator style builds,
// this is done by each module
scheduleDownstreamBuilds(listener);
}
MavenMailer mailer = project.getReporters().get(MavenMailer.class);
if (mailer != null) {
new MailSender(mailer.recipients,
mailer.dontNotifyEveryUnstableBuild,
mailer.sendToIndividuals).execute(MavenModuleSetBuild.this, listener);
}
// too late to set the build result at this point. so ignore failures.
performAllBuildSteps(listener, project.getPublishers(), false);
performAllBuildSteps(listener, project.getProperties(), false);
}
}
/**
 * Runs Maven and builds the project.
 *
 * This is only used for
 * {@link MavenModuleSet#isAggregatorStyleBuild() the aggregator style build}.
 * Executes inside the forked Maven JVM; callbacks route module events to the
 * per-module {@link MavenReporter}s and build proxies.
 */
private static final class Builder extends MavenBuilder {
// per-module build proxies, wrapped in FilterImpl for async execution
private final Map<ModuleName,MavenBuildProxy2> proxies;
// reporters created for each module before the build starts
private final Map<ModuleName,List<MavenReporter>> reporters = new HashMap<ModuleName,List<MavenReporter>>();
// mojo execution records accumulated per module
private final Map<ModuleName,List<ExecutedMojo>> executedMojos = new HashMap<ModuleName,List<ExecutedMojo>>();
// start timestamp of the currently running mojo (for duration measurement)
private long mojoStartTime;
// the proxy of the most recently completed module, flushed in call()
private MavenBuildProxy2 lastProxy;
/**
 * Kept so that we can finalize them in the end method.
 */
private final transient Map<ModuleName,ProxyImpl2> sourceProxies;
public Builder(BuildListener listener,Map<ModuleName,ProxyImpl2> proxies, Collection<MavenModule> modules, List<String> goals, Map<String,String> systemProps) {
super(listener,goals,systemProps);
this.sourceProxies = proxies;
this.proxies = new HashMap<ModuleName, MavenBuildProxy2>(proxies);
// wrap every proxy so executeAsync goes through the remoting channel
for (Entry<ModuleName,MavenBuildProxy2> e : this.proxies.entrySet())
e.setValue(new FilterImpl(e.getValue()));
for (MavenModule m : modules)
reporters.put(m.getModuleName(),m.createReporters());
}
// Proxy filter that dispatches executeAsync over the current remoting channel.
private class FilterImpl extends MavenBuildProxy2.Filter<MavenBuildProxy2> implements Serializable {
public FilterImpl(MavenBuildProxy2 core) {
super(core);
}
@Override
public void executeAsync(final BuildCallable<?,?> program) throws IOException {
futures.add(Channel.current().callAsync(new AsyncInvoker(core,program)));
}
private static final long serialVersionUID = 1L;
}
/**
 * Invoked after the maven has finished running, and in the master, not in the maven process.
 */
void end(Launcher launcher) throws IOException, InterruptedException {
for (Map.Entry<ModuleName,ProxyImpl2> e : sourceProxies.entrySet()) {
ProxyImpl2 p = e.getValue();
for (MavenReporter r : reporters.get(e.getKey())) {
// we'd love to do this when the module build ends, but doing so requires
// we know how many task segments are in the current build.
r.end(p.owner(),launcher,listener);
p.appendLastLog();
}
p.close();
}
}
@Override
public Result call() throws IOException {
try {
return super.call();
} finally {
// make sure the last module's log is fully transferred
if(lastProxy!=null)
lastProxy.appendLastLog();
}
}
@Override
void preBuild(MavenSession session, ReactorManager rm, EventDispatcher dispatcher) throws BuildFailureException, LifecycleExecutionException, IOException, InterruptedException {
// set all modules which are not actually being build (in incremental builds) to NOT_BUILD
@SuppressWarnings("unchecked")
List<MavenProject> projects = rm.getSortedProjects();
Set<ModuleName> buildingProjects = new HashSet<ModuleName>();
for (MavenProject p : projects) {
buildingProjects.add(new ModuleName(p));
}
for (Entry<ModuleName,MavenBuildProxy2> e : this.proxies.entrySet()) {
if (! buildingProjects.contains(e.getKey())) {
e.getValue().setResult(Result.NOT_BUILT);
e.getValue().end();
}
}
}
void postBuild(MavenSession session, ReactorManager rm, EventDispatcher dispatcher) throws BuildFailureException, LifecycleExecutionException, IOException, InterruptedException {
// TODO
}
// Called when Maven starts building a module: start its proxy and
// give every reporter a chance to veto via preBuild.
void preModule(MavenProject project) throws InterruptedException, IOException, hudson.maven.agent.AbortException {
ModuleName name = new ModuleName(project);
MavenBuildProxy2 proxy = proxies.get(name);
listener.getLogger().flush(); // make sure the data until here are all written
proxy.start();
for (MavenReporter r : reporters.get(name))
if(!r.preBuild(proxy,project,listener))
throw new hudson.maven.agent.AbortException(r+" failed");
}
// Called when Maven finishes a module: run reporter postBuild hooks,
// record the executed mojos, and end the proxy.
void postModule(MavenProject project) throws InterruptedException, IOException, hudson.maven.agent.AbortException {
ModuleName name = new ModuleName(project);
MavenBuildProxy2 proxy = proxies.get(name);
List<MavenReporter> rs = reporters.get(name);
if(rs==null) { // probe for issue #906
throw new AssertionError("reporters.get("+name+")==null. reporters="+reporters+" proxies="+proxies);
}
for (MavenReporter r : rs)
if(!r.postBuild(proxy,project,listener))
throw new hudson.maven.agent.AbortException(r+" failed");
proxy.setExecutedMojos(executedMojos.get(name));
listener.getLogger().flush(); // make sure the data until here are all written
proxy.end();
lastProxy = proxy;
}
// Called before each mojo execution; records the start time for duration.
void preExecute(MavenProject project, MojoInfo mojoInfo) throws IOException, InterruptedException, hudson.maven.agent.AbortException {
ModuleName name = new ModuleName(project);
MavenBuildProxy proxy = proxies.get(name);
for (MavenReporter r : reporters.get(name))
if(!r.preExecute(proxy,project,mojoInfo,listener))
throw new hudson.maven.agent.AbortException(r+" failed");
mojoStartTime = System.currentTimeMillis();
}
// Called after each mojo execution; records the mojo with its duration
// and marks the module FAILURE if the mojo threw.
void postExecute(MavenProject project, MojoInfo mojoInfo, Exception exception) throws IOException, InterruptedException, hudson.maven.agent.AbortException {
ModuleName name = new ModuleName(project);
List<ExecutedMojo> mojoList = executedMojos.get(name);
if(mojoList==null)
executedMojos.put(name,mojoList=new ArrayList<ExecutedMojo>());
mojoList.add(new ExecutedMojo(mojoInfo,System.currentTimeMillis()-mojoStartTime));
MavenBuildProxy2 proxy = proxies.get(name);
for (MavenReporter r : reporters.get(name))
if(!r.postExecute(proxy,project,mojoInfo,listener,exception))
throw new hudson.maven.agent.AbortException(r+" failed");
if(exception!=null)
proxy.setResult(Result.FAILURE);
}
// Forwards generated-report notifications to the module's reporters.
void onReportGenerated(MavenProject project, MavenReportInfo report) throws IOException, InterruptedException, hudson.maven.agent.AbortException {
ModuleName name = new ModuleName(project);
MavenBuildProxy proxy = proxies.get(name);
for (MavenReporter r : reporters.get(name))
if(!r.reportGenerated(proxy,project,report,listener))
throw new hudson.maven.agent.AbortException(r+" failed");
}
private static final long serialVersionUID = 1L;
}
/**
 * Used to tunnel exception from Maven through remoting.
 */
private static final class MavenExecutionException extends RuntimeException {
private MavenExecutionException(Exception cause) {
super(cause);
}
// Narrows the return type: the cause is always the wrapped Exception.
@Override
public Exception getCause() {
return (Exception)super.getCause();
}
private static final long serialVersionUID = 1L;
}
/**
 * Executed on the slave to parse POM and extract information into {@link PomInfo},
 * which will be then brought back to the master.
 */
private static final class PomParser implements FileCallable<List<PomInfo>> {
private final BuildListener listener;
private final String rootPOM;
/**
 * Capture the value of the static field so that the debug flag
 * takes an effect even when {@link PomParser} runs in a slave.
 */
private final boolean verbose = debug;
private final MavenInstallation mavenHome;
private final String profiles;
private final Properties properties;
private final String privateRepository;
private final String alternateSettings;
private final boolean nonRecursive;
// We're called against the module root, not the workspace, which can cause a lot of confusion.
private final String workspaceProper;
public PomParser(BuildListener listener, MavenInstallation mavenHome, MavenModuleSet project) {
// project cannot be shipped to the remote JVM, so all the relevant properties need to be captured now.
this.listener = listener;
this.mavenHome = mavenHome;
this.rootPOM = project.getRootPOM();
this.profiles = project.getProfiles();
this.properties = project.getMavenProperties();
this.nonRecursive = project.isNonRecursive();
this.workspaceProper = project.getLastBuild().getWorkspace().getRemote();
if (project.usesPrivateRepository()) {
this.privateRepository = project.getLastBuild().getWorkspace().child(".repository").getRemote();
} else {
// null lets the embedder fall back to its default local repository
this.privateRepository = null;
}
this.alternateSettings = project.getAlternateSettings();
}
/**
 * Computes the path of {@link #rootPOM}.
 *
 * Returns "abc" if rootPOM="abc/pom.xml"
 * If rootPOM="pom.xml", this method returns "".
 */
private String getRootPath() {
int idx = Math.max(rootPOM.lastIndexOf('/'), rootPOM.lastIndexOf('\\'));
if(idx==-1) return "";
return rootPOM.substring(0,idx);
}
/**
 * Runs on the slave: locates the root POM, reads it through a Maven embedder,
 * resolves the module tree (recursively unless {@link #nonRecursive}), and
 * returns one {@link PomInfo} per discovered module.
 */
public List<PomInfo> invoke(File ws, VirtualChannel channel) throws IOException {
File pom = new File(ws,rootPOM);
PrintStream logger = listener.getLogger();
// choice of module root ('ws' in this method) is somewhat arbitrary
// when multiple CVS/SVN modules are checked out, so also check
// the path against the workspace root if that seems like what the user meant (see issue #1293)
File parentLoc = new File(ws.getParentFile(),rootPOM);
if(!pom.exists() && parentLoc.exists())
pom = parentLoc;
if(!pom.exists())
throw new AbortException(Messages.MavenModuleSetBuild_NoSuchPOMFile(pom));
if(verbose)
logger.println("Parsing "
+ (nonRecursive ? "non-recursively " : "recursively ")
+ pom);
// alternateSettings is resolved against the workspace root, not the module root.
File settingsLoc = (alternateSettings == null) ? null
: new File(workspaceProper, alternateSettings);
if ((settingsLoc != null) && (!settingsLoc.exists())) {
throw new AbortException(Messages.MavenModuleSetBuild_NoSuchAlternateSettings(settingsLoc.getAbsolutePath()));
}
try {
MavenEmbedder embedder = MavenUtil.
createEmbedder(listener, mavenHome.getHomeDir(), profiles,
properties, privateRepository, settingsLoc);
MavenProject mp = embedder.readProject(pom);
Map<MavenProject,String> relPath = new HashMap<MavenProject,String>();
MavenUtil.resolveModules(embedder,mp,getRootPath(),relPath,listener,nonRecursive);
if(verbose) {
for (Entry<MavenProject, String> e : relPath.entrySet())
logger.printf("Discovered %s at %s\n",e.getKey().getId(),e.getValue());
}
List<PomInfo> infos = new ArrayList<PomInfo>();
toPomInfo(mp,null,relPath,infos);
for (PomInfo pi : infos)
pi.cutCycle();
// NOTE(review): embedder.stop() is skipped when readProject/resolveModules throw;
// consider try/finally if the embedder holds resources — TODO confirm.
embedder.stop();
return infos;
} catch (MavenEmbedderException e) {
throw new MavenExecutionException(e);
} catch (ProjectBuildingException e) {
throw new MavenExecutionException(e);
}
}
// Depth-first walk over the collected module tree, appending one PomInfo per project.
private void toPomInfo(MavenProject mp, PomInfo parent, Map<MavenProject,String> relPath, List<PomInfo> infos) {
PomInfo pi = new PomInfo(mp, parent, relPath.get(mp));
infos.add(pi);
for (MavenProject child : (List<MavenProject>)mp.getCollectedProjects())
toPomInfo(child,pi,relPath,infos);
}
private static final long serialVersionUID = 1L;
}
private static final Logger LOGGER = Logger.getLogger(MavenModuleSetBuild.class.getName());
/**
* Extra verbose debug switch.
*/
public static boolean debug = false;
/**
 * Covariant override narrowing the return type to {@code MavenModuleSet}
 * for convenience of callers.
 */
@Override
public MavenModuleSet getParent() {// don't know why, but javac wants this
return super.getParent();
}
}
|
package edacc.configurator.aac.racing;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.rosuda.JRI.Rengine;
import edacc.api.API;
import edacc.configurator.aac.AAC;
import edacc.configurator.aac.InstanceIdSeed;
import edacc.configurator.aac.Parameters;
import edacc.configurator.aac.SolverConfiguration;
import edacc.configurator.aac.course.StratifiedClusterCourse;
import edacc.configurator.aac.util.RInterface;
import edacc.model.ConfigurationScenarioDAO;
import edacc.model.ExperimentDAO;
import edacc.model.ExperimentResult;
import edacc.model.Instance;
import edacc.model.InstanceDAO;
import edacc.model.ResultCode;
import edacc.model.StatusCode;
import edacc.model.Experiment.Cost;
import edacc.configurator.aac.JobListener;
public class DefaultSMBO extends RacingMethods implements JobListener {
SolverConfiguration bestSC;
int incumbentNumber;
int num_instances;
private int numSCs = 0;
private int curThreshold = 0;
private List<InstanceIdSeed> completeCourse;
private Rengine rengine;
private Map<Integer, Integer> limitByInstance = new HashMap<Integer, Integer>();
private int increaseIncumbentRunsEvery = 32;
private String featureFolder = null;
private String featureCacheFolder = null;
private boolean useClusterCourse = false;
// when selecting jobs from the incumbent, prefer jobs that didn't time out
private boolean aggressiveJobSelection = false;
private boolean adaptiveCapping = false;
private float slackFactor = 1.5f;
public boolean initialDesignMode = true;
HashSet<Integer> stopEvalSolverConfigIds = new HashSet<Integer>();
Set<SolverConfiguration> challengers = new HashSet<SolverConfiguration>();
/**
 * Reads the DefaultSMBO_* racing parameters from the parameter map, optionally
 * builds a stratified cluster course (requires the R libraries 'asbio' and
 * 'survival'), and installs the first supplied configuration as the incumbent.
 *
 * @throws Exception if a required R library cannot be loaded, or DAO access fails
 */
public DefaultSMBO(AAC aac, Random rng, API api, Parameters parameters, List<SolverConfiguration> firstSCs, List<SolverConfiguration> referenceSCs) throws Exception {
super(aac, rng, api, parameters, firstSCs, referenceSCs);
aac.addJobListener(this);
incumbentNumber = 0;
num_instances = ConfigurationScenarioDAO.getConfigurationScenarioByExperimentId(parameters.getIdExperiment()).getCourse().getInitialLength();
// Optional overrides taken from the racing-method parameter map; defaults stand otherwise.
String val;
if ((val = parameters.getRacingMethodParameters().get("DefaultSMBO_increaseIncumbentRunsEvery")) != null)
increaseIncumbentRunsEvery = Integer.valueOf(val);
if ((val = parameters.getRacingMethodParameters().get("DefaultSMBO_aggressiveJobSelection")) != null)
aggressiveJobSelection = Integer.valueOf(val) == 1;
if ((val = parameters.getRacingMethodParameters().get("DefaultSMBO_featureFolder")) != null)
featureFolder = val;
if ((val = parameters.getRacingMethodParameters().get("DefaultSMBO_featureCacheFolder")) != null)
featureCacheFolder = val;
if ((val = parameters.getRacingMethodParameters().get("DefaultSMBO_useClusterCourse")) != null)
useClusterCourse = Integer.valueOf(val) == 1;
if ((val = parameters.getRacingMethodParameters().get("DefaultSMBO_adaptiveCapping")) != null)
adaptiveCapping = Integer.valueOf(val) == 1;
if ((val = parameters.getRacingMethodParameters().get("DefaultSMBO_slackFactor")) != null)
slackFactor = Float.valueOf(val);
if (useClusterCourse) {
rengine = RInterface.getRengine();
// rengine.eval(...) returns null when the library failed to load.
if (rengine.eval("library(asbio)") == null) {
rengine.end();
throw new Exception("Did not find R library asbio (try running install.packages(\"asbio\")).");
}
if (rengine.eval("library(survival)") == null) {
rengine.end();
throw new Exception("Did not find R library survival (should come with R though).");
}
StratifiedClusterCourse course = new StratifiedClusterCourse(rengine, api.getExperimentInstances(parameters.getIdExperiment()), null, null, parameters.getMaxParcoursExpansionFactor(), rng, featureFolder, featureCacheFolder);
this.completeCourse = course.getCourse();
List<Instance> instances = InstanceDAO.getAllByExperimentId(parameters.getIdExperiment());
Map<Integer, Instance> instanceById = new HashMap<Integer, Instance>();
for (Instance i: instances) instanceById.put(i.getId(), i);
pacc.log("[DefaultSMBO] Clustered instances into " + course.getK() + " clusters. Complete course:");
for (InstanceIdSeed isp: completeCourse) {
pacc.log("[DefaultSMBO] " + instanceById.get(isp.instanceId) + ", " + isp.seed);
}
}
curThreshold = increaseIncumbentRunsEvery;
if (!firstSCs.isEmpty()) {
initBestSC(firstSCs.get(0));
}
}
/**
 * Installs the given solver configuration as the initial incumbent and expands
 * its parcours by up to {@code initialDefaultParcoursLength} runs, bounded by
 * the maximum parcours size. Uses the clustered course when enabled, otherwise
 * the default parcours expansion.
 *
 * @param sc the configuration that becomes the first incumbent
 */
private void initBestSC(SolverConfiguration sc) throws Exception {
    // BUGFIX: honor the parameter. The old code read firstSCs.get(0) unconditionally,
    // which ignored the sc passed in from solverConfigurationsCreated() and threw
    // IndexOutOfBoundsException when firstSCs was empty.
    this.bestSC = sc;
    bestSC.setIncumbentNumber(incumbentNumber++);
    pacc.log("i " + pacc.getWallTime() + " ," + bestSC.getCost() + ", n.A.," + bestSC.getIdSolverConfiguration() + ", n.A.," + bestSC.getParameterConfiguration().toString());
    int expansion = 0;
    if (bestSC.getJobCount() < parameters.getMaxParcoursExpansionFactor() * num_instances) {
        expansion = Math.min(parameters.getMaxParcoursExpansionFactor() * num_instances - bestSC.getJobCount(), parameters.getInitialDefaultParcoursLength());
        if (useClusterCourse) {
            for (int i = 0; i < expansion; i++) {
                // getJobCount() grows as jobs are added, advancing through the course.
                pacc.addJob(bestSC, completeCourse.get(bestSC.getJobCount()).seed,
                        completeCourse.get(bestSC.getJobCount()).instanceId, parameters.getMaxParcoursExpansionFactor()
                        * num_instances - bestSC.getJobCount());
            }
        } else {
            pacc.expandParcoursSC(bestSC, expansion);
        }
    }
    if (expansion > 0) {
        pacc.log("c Expanding parcours of best solver config " + bestSC.getIdSolverConfiguration() + " by " + expansion);
    }
}
/** @return human-readable name of this racing method (used in logs). */
@Override
public String toString() {
    return "DefaultSMBO racing method";
}
/**
 * Compares two solver configurations by delegating entirely to
 * {@code SolverConfiguration.compareTo}.
 */
@Override
public int compareTo(SolverConfiguration sc1, SolverConfiguration sc2) {
return sc1.compareTo(sc2);
}
/**
 * Race bookkeeping for configurations whose jobs all finished: applies the
 * adaptive-capping time limits retroactively, then either promotes a challenger
 * to incumbent (it matched the incumbent's job count and won), gives it more
 * jobs (it is ahead so far but has fewer runs), or eliminates it (it lost or
 * was flagged via stopEvaluation).
 */
@Override
public void solverConfigurationsFinished(List<SolverConfiguration> scs) throws Exception {
if (initialDesignMode) {
// In initial-design mode, re-evaluate all current challengers plus the incumbent
// instead of only the configurations the caller passed in.
pacc.updateJobsStatus(bestSC);
scs.clear();
scs.addAll(challengers);
scs.add(bestSC);
}
for (SolverConfiguration sc : scs) {
if (sc.getJobCount() != sc.getFinishedJobs().size()) continue;
if (sc == bestSC) {
continue;
}
// Retroactively apply the per-instance CPU time limits from adaptive capping:
// runs that exceeded the (possibly lowered) limit are rewritten as timeouts.
for (ExperimentResult er : sc.getJobs()) {
ExperimentResult apiER = api.getJob(er.getId());
if (limitByInstance.get(er.getInstanceId()) == null) continue;
er.setCPUTimeLimit(limitByInstance.get(er.getInstanceId()));
apiER.setCPUTimeLimit(limitByInstance.get(er.getInstanceId()));
if (er.getResultTime() > limitByInstance.get(er.getInstanceId()) && er.getResultCode().isCorrect()) {
pacc.log("Setting time limit exceeded to job " + er.getId() + ".");
er.setStatus(StatusCode.TIMELIMIT);
apiER.setStatus(StatusCode.TIMELIMIT);
er.setResultCode(ResultCode.UNKNOWN);
apiER.setResultCode(ResultCode.UNKNOWN);
}
}
int comp = compareTo(sc, bestSC);
if (!stopEvalSolverConfigIds.contains(sc.getIdSolverConfiguration()) && comp >= 0) {
if (sc.getJobCount() == bestSC.getJobCount()) {
sc.setFinished(true);
// all jobs from bestSC computed and won against
// best:
if (comp > 0) {
bestSC = sc;
sc.setIncumbentNumber(incumbentNumber++);
pacc.log("new incumbent: " + sc.getIdSolverConfiguration() + ":" + pacc.getWallTime() + ":" + pacc.getCumulatedCPUTime() + ":" + sc.getCost());
pacc.log("i " + pacc.getWallTime() + "," + sc.getCost() + ",n.A. ," + sc.getIdSolverConfiguration() + ",n.A. ," + sc.getParameterConfiguration().toString());
pacc.validateIncumbent(bestSC);
}
challengers.remove(sc);
// api.updateSolverConfigurationCost(sc.getIdSolverConfiguration(),
// sc.getCost(),
// statistics.getCostFunction());
// listNewSC.remove(i);
} else {
// Challenger is ahead but has fewer runs than the incumbent: double its jobs.
int generated = 0;
if (aggressiveJobSelection) {
generated = pacc.addRandomJobAggressive(sc.getJobCount(), sc, bestSC, sc.getJobCount());
} else {
generated = pacc.addRandomJob(sc.getJobCount(), sc, bestSC, sc.getJobCount());
}
pacc.log("c Generated " + generated + " jobs for solver config id " + sc.getIdSolverConfiguration());
pacc.addSolverConfigurationToListNewSC(sc);
}
} else {// lost against best on part of the actual (or should not be evaluated anymore)
// parcours:
stopEvalSolverConfigIds.remove(sc.getIdSolverConfiguration());
challengers.remove(sc);
sc.setFinished(true);
if (parameters.isDeleteSolverConfigs())
api.removeSolverConfig(sc.getIdSolverConfiguration());
pacc.log("d Solver config " + sc.getIdSolverConfiguration() + " with cost " + sc.getCost() + " lost against best solver config on " + sc.getJobCount() + " runs.");
api.updateSolverConfigurationName(sc.getIdSolverConfiguration(), "* " + sc.getName());
}
}
}
/**
 * Seeds jobs for freshly created configurations: bootstraps the incumbent if
 * there is none yet, re-checks the current challengers, assigns initial jobs to
 * each new configuration, and periodically grows the incumbent's parcours
 * (every {@link #increaseIncumbentRunsEvery} created configurations).
 */
@Override
public void solverConfigurationsCreated(List<SolverConfiguration> scs) throws Exception {
if (scs.isEmpty())
return;
if (bestSC == null) {
initBestSC(scs.get(0));
scs.remove(0);
}
// First, check if we can update the incumbent
this.solverConfigurationsFinished(new LinkedList<SolverConfiguration>(challengers));
for (SolverConfiguration sc : scs) {
if (initialDesignMode) {
if (useClusterCourse) {
for (int i = 0; i < parameters.getInitialDefaultParcoursLength(); i++) {
pacc.addJob(sc, completeCourse.get(sc.getJobCount()).seed,
completeCourse.get(sc.getJobCount()).instanceId, sc.getJobCount());
}
} else {
pacc.expandParcoursSC(sc, parameters.getInitialDefaultParcoursLength());
}
} else {
// Outside initial design, challengers start on a subset of the incumbent's runs.
if (aggressiveJobSelection) {
pacc.addRandomJobAggressive(parameters.getMinRuns(), sc, bestSC, sc.getJobCount());
} else {
pacc.addRandomJob(parameters.getMinRuns(), sc, bestSC, sc.getJobCount());
}
}
pacc.addSolverConfigurationToListNewSC(sc);
}
if (!initialDesignMode) {
// Grow the incumbent's parcours by one run for every curThreshold configurations created.
for (int i = 0; i < scs.size(); i++) {
numSCs += 1;
if (numSCs > curThreshold && bestSC.getJobCount() < parameters.getMaxParcoursExpansionFactor() * num_instances) {
pacc.log("c Expanding parcours of best solver config " + bestSC.getIdSolverConfiguration() + " by 1");
if (useClusterCourse) {
if (bestSC.getJobCount() < completeCourse.size()) {
pacc.addJob(bestSC, completeCourse.get(bestSC.getJobCount()).seed,
completeCourse.get(bestSC.getJobCount()).instanceId, bestSC.getJobCount());
} else {
pacc.log("c Incumbent reached maximum number of evaluations. No more jobs are generated for it.");
}
} else {
pacc.expandParcoursSC(bestSC, 1);
}
pacc.addSolverConfigurationToListNewSC(bestSC);
curThreshold += increaseIncumbentRunsEvery;
}
}
}
challengers.addAll(scs);
}
/**
 * Decides how many new solver configurations the search should generate:
 * enough to keep every core busy, and twice that when the per-job CPU time
 * limit is very short (jobs finish quickly, so over-provision).
 */
@Override
public int computeOptimalExpansion(int coreCount, int jobs, int listNewSCSize) {
    if (coreCount < parameters.getMinCPUCount() || coreCount > parameters.getMaxCPUCount()) {
        pacc.log("w Warning: Current core count is " + coreCount);
    }
    if (parameters.getJobCPUTimeLimit() > 10) {
        final int idleCores = Math.max(0, coreCount - jobs);
        if (idleCores > 0) {
            pacc.log("c [DefaultSMBO] coreCount: " + coreCount + ", Jobs to finish: " + jobs);
        }
        return idleCores;
    }
    // Short jobs: request up to twice the core count to keep the machine saturated.
    return Math.max(0, 2 * coreCount - jobs);
}
@Override
public List<String> getParameters() {
List<String> p = new LinkedList<String>();
p.add("%
p.add("DefaultSMBO_adaptiveCapping = " + (adaptiveCapping ? 1 : 0) + " % (Use adaptive capping mechanism)");
return p;
}
/**
 * @return a singleton list holding the current incumbent, or an empty list when
 *         no incumbent has been established yet
 */
@Override
public List<SolverConfiguration> getBestSolverConfigurations() {
    final List<SolverConfiguration> best = new LinkedList<SolverConfiguration>();
    if (bestSC != null) {
        best.add(bestSC);
    }
    return best;
}
/**
 * Flags the given configurations so the race eliminates them the next time
 * their finished jobs are processed.
 */
@Override
public void stopEvaluation(List<SolverConfiguration> scs) throws Exception {
    for (final SolverConfiguration config : scs) {
        stopEvalSolverConfigIds.add(config.getIdSolverConfiguration());
    }
}
@Override
public void raceFinished() {
// No-op: auto-generated stub left unimplemented; DefaultSMBO performs no
// end-of-race cleanup here.
}
/**
 * Adaptive capping hook, active only for runtime-cost experiments: whenever
 * incumbent runs finish, recomputes a per-instance CPU time limit as
 * slackFactor times the incumbent's average cost on that instance (clamped to
 * [1, jobCPUTimeLimit]) and propagates changed limits to all challengers.
 */
@Override
public void jobsFinished(List<ExperimentResult> result) throws Exception {
// adapt instance specific limits
if (adaptiveCapping && ExperimentDAO.getById(parameters.getIdExperiment()).getDefaultCost().equals(Cost.resultTime)) {
// Limits are derived from the incumbent, so only recompute when one of its runs finished.
boolean anyIncumbentRunsFinished = false;
for (ExperimentResult run: result) {
if (run.getSolverConfigId() == bestSC.getIdSolverConfiguration()) {
anyIncumbentRunsFinished = true;
break;
}
}
if (!anyIncumbentRunsFinished) return;
for (Instance instance: api.getExperimentInstances(parameters.getIdExperiment())) {
double incumbentAvg = 0.0f;
int count = 0;
for (ExperimentResult run: bestSC.getFinishedJobs()) {
if (run.getInstanceId() == instance.getId()) {
incumbentAvg += parameters.getStatistics().getCostFunction().singleCost(run);
count++;
}
}
if (count > 0) {
incumbentAvg /= count;
// New cap: slackFactor * average incumbent cost, at least 1s, never above the configured limit.
int newLimit = Math.max(1, Math.min((int)Math.ceil(slackFactor * incumbentAvg), parameters.getJobCPUTimeLimit()));
if (limitByInstance.get(instance.getId()) != null && limitByInstance.get(instance.getId()) == newLimit) {
// limit did not change
continue;
}
limitByInstance.put(instance.getId(), newLimit);
pacc.changeCPUTimeLimit(instance.getId(), newLimit, new LinkedList<SolverConfiguration>(challengers), false, false);
}
}
}
}
}
|
package hudson.maven;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Proc;
import hudson.AbortException;
import hudson.EnvVars;
import hudson.slaves.Channels;
import static hudson.Util.fixNull;
import hudson.maven.agent.Main;
import hudson.maven.agent.Maven21Interceptor;
import hudson.maven.agent.PluginManagerInterceptor;
import hudson.maven.ProcessCache.NewProcess;
import hudson.model.BuildListener;
import hudson.model.Computer;
import hudson.model.Executor;
import hudson.model.Hudson;
import hudson.model.JDK;
import hudson.model.Node;
import hudson.model.Run.RunnerAbortedException;
import hudson.model.TaskListener;
import hudson.remoting.Callable;
import hudson.remoting.Channel;
import hudson.remoting.RemoteInputStream;
import hudson.remoting.RemoteOutputStream;
import hudson.remoting.SocketInputStream;
import hudson.remoting.SocketOutputStream;
import hudson.remoting.Which;
import hudson.tasks.Maven.MavenInstallation;
import hudson.util.ArgumentListBuilder;
import hudson.util.IOException2;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.util.Arrays;
import java.util.logging.Logger;
/**
* Launches the maven process.
*
* @author Kohsuke Kawaguchi
*/
final class MavenProcessFactory implements ProcessCache.Factory {
private final MavenModuleSet mms;
private final Launcher launcher;
/**
* Environment variables to be set to the maven process.
* The same variables are exposed to the system property as well.
*/
private final EnvVars envVars;
/**
* Optional working directory. Because of the process reuse, we can't always guarantee
* that the returned Maven process has this as the working directory. But for the
* aggregator style build, the process reuse is disabled, so in practice this always works.
*
* Also, Maven is supposed to work correctly regardless of the process current directory,
* so a good behaving maven project shouldn't rely on the current project.
*/
private final FilePath workDir;
/**
 * @param mms     the Maven project whose build needs a Maven process
 * @param launcher used to start the process on the right node
 * @param envVars  environment variables for the Maven process
 * @param workDir  optional working directory (see field javadoc)
 */
MavenProcessFactory(MavenModuleSet mms, Launcher launcher, EnvVars envVars, FilePath workDir) {
this.mms = mms;
this.launcher = launcher;
this.envVars = envVars;
this.workDir = workDir;
}
/**
* Represents a bi-directional connection.
*
* <p>
* This implementation is remoting aware, so it can be safely sent to the remote callable object.
*
* <p>
* When we run Maven on a slave, the master may not have a direct TCP/IP connectivty to the slave.
* That means the {@link Channel} between the master and the Maven needs to be tunneled through
* the channel between master and the slave, then go to TCP socket to the Maven.
*/
private static final class Connection implements Serializable {
public InputStream in;
public OutputStream out;
Connection(InputStream in, OutputStream out) {
this.in = in;
this.out = out;
}
// On serialization, swap the raw streams for remoting-aware proxies so the
// connection can be shipped across a remoting Channel.
private Object writeReplace() {
return new Connection(new RemoteInputStream(in),new RemoteOutputStream(out));
}
private Object readResolve() {
// ObjectInputStream seems to access data at byte-level and do not do any buffering,
// so if we are remoted, buffering would be crucial.
this.in = new BufferedInputStream(in);
this.out = new BufferedOutputStream(out);
return this;
}
private static final long serialVersionUID = 1L;
}
/**
 * Handle to a pending server socket: exposes the chosen port up front and
 * lets the caller accept the Maven process's connection later.
 */
interface Acceptor {
Connection accept() throws IOException;
int getPort();
}
/**
* Opens a server socket and returns {@link Acceptor} so that
* we can accept a connection later on it.
*/
private static final class SocketHandler implements Callable<Acceptor,IOException> {
public Acceptor call() throws IOException {
return new AcceptorImpl();
}
private static final long serialVersionUID = 1L;
static final class AcceptorImpl implements Acceptor, Serializable {
private transient final ServerSocket serverSocket;
private transient Socket socket;
AcceptorImpl() throws IOException {
// open a TCP socket to talk to the launched Maven process.
// let the OS pick up a random open port
this.serverSocket = new ServerSocket();
serverSocket.bind(null); // new InetSocketAddress(InetAddress.getLocalHost(),0));
// prevent a hang at the accept method in case the forked process didn't start successfully
serverSocket.setSoTimeout(30*1000);
}
public Connection accept() throws IOException {
socket = serverSocket.accept();
// we'd only accept one connection
serverSocket.close();
return new Connection(new SocketInputStream(socket),new SocketOutputStream(socket));
}
public int getPort() {
return serverSocket.getLocalPort();
}
/**
 * When sent to the remote node, send a proxy.
 */
private Object writeReplace() {
return Channel.current().export(Acceptor.class, this);
}
}
}
/**
* Starts maven process.
*/
/**
 * Launches the Maven process on the build node and wires up the remoting
 * channel: opens a server socket on the node, starts Maven pointing at that
 * port, accepts the callback connection, and wraps it in a Channel.
 *
 * @throws hudson.AbortException if Maven never connected back (process died)
 */
public NewProcess newProcess(BuildListener listener, OutputStream out) throws IOException, InterruptedException {
if(debug)
listener.getLogger().println("Using env variables: "+ envVars);
try {
final Acceptor acceptor = launcher.getChannel().call(new SocketHandler());
final ArgumentListBuilder cmdLine = buildMavenCmdLine(listener,acceptor.getPort());
String[] cmds = cmdLine.toCommandArray();
final Proc proc = launcher.launch().cmds(cmds).envs(envVars).stdout(out).pwd(workDir).start();
Connection con;
try {
con = acceptor.accept();
} catch (SocketTimeoutException e) {
// failed to connect. Is the process dead?
// if so, the error should have been provided by the launcher already.
// so abort gracefully without a stack trace.
if(!proc.isAlive())
throw new AbortException("Failed to launch Maven. Exit code = "+proc.join());
throw e;
}
return new NewProcess(
Channels.forProcess("Channel to Maven "+ Arrays.toString(cmds),
Computer.threadPoolForRemoting, new BufferedInputStream(con.in), new BufferedOutputStream(con.out),
listener.getLogger(), proc),
proc);
} catch (IOException e) {
if(fixNull(e.getMessage()).contains("java: not found")) {
// diagnose issue #659
JDK jdk = mms.getJDK();
if(jdk==null)
throw new IOException2(mms.getDisplayName()+" is not configured with a JDK, but your PATH doesn't include Java",e);
}
throw e;
}
}
/**
* Builds the command line argument list to launch the maven process.
*
* UGLY.
*/
/**
 * Assembles: java [debug/profiler opts] [MAVEN_OPTS] -cp maven-agent.jar:classworlds.jar
 * Main M2_HOME remoting.jar interceptor.jar tcpPort [maven2.1-interceptor.jar].
 * The positional arguments after Main are consumed in this exact order by the agent.
 */
private ArgumentListBuilder buildMavenCmdLine(BuildListener listener,int tcpPort) throws IOException, InterruptedException {
MavenInstallation mvn = getMavenInstallation(listener);
if(mvn==null) {
listener.error("Maven version is not configured for this project. Can't determine which Maven to run");
throw new RunnerAbortedException();
}
if(mvn.getHome()==null) {
listener.error("Maven '%s' doesn't have its home set",mvn.getName());
throw new RunnerAbortedException();
}
// find classworlds.jar
String classWorldsJar = launcher.getChannel().call(new GetClassWorldsJar(mvn.getHome(),listener));
boolean isMaster = getCurrentNode()== Hudson.getInstance();
FilePath slaveRoot=null;
if(!isMaster)
slaveRoot = getCurrentNode().getRootPath();
ArgumentListBuilder args = new ArgumentListBuilder();
JDK jdk = getJava(listener);
if(jdk==null) {
// no JDK configured: rely on 'java' being on the PATH
args.add("java");
} else {
args.add(jdk.getHome()+"/bin/java"); // use JDK.getExecutable() here ?
}
if(debugPort!=0)
args.add("-Xrunjdwp:transport=dt_socket,server=y,address="+debugPort);
if(yjp)
args.add("-agentlib:yjpagent=tracing");
args.addTokenized(getMavenOpts());
args.add("-cp");
// on a slave the agent jars were previously copied into the slave root
args.add(
(isMaster? Which.jarFile(Main.class).getAbsolutePath():slaveRoot.child("maven-agent.jar").getRemote())+
(launcher.isUnix()?":":";")+classWorldsJar);
args.add(Main.class.getName());
// M2_HOME
args.add(mvn.getHome());
// remoting.jar
String remotingJar = launcher.getChannel().call(new GetRemotingJar());
if(remotingJar==null) {// this shouldn't be possible, but there are still reports indicating this, so adding a probe here.
listener.error("Failed to determine the location of slave.jar");
throw new RunnerAbortedException();
}
args.add(remotingJar);
// interceptor.jar
args.add(isMaster?
Which.jarFile(PluginManagerInterceptor.class).getAbsolutePath():
slaveRoot.child("maven-interceptor.jar").getRemote());
// TCP/IP port to establish the remoting infrastructure
args.add(tcpPort);
// if this is Maven 2.1, interceptor override
if(mvn.isMaven2_1(launcher)) {
args.add(isMaster?
Which.jarFile(Maven21Interceptor.class).getAbsolutePath():
slaveRoot.child("maven2.1-interceptor.jar").getRemote());
}
return args;
}
/**
 * Resolves MAVEN_OPTS for the process: the project-level setting wins; when it
 * is blank, falls back to the build node's MAVEN_OPTS environment variable.
 * The result is expanded against the build's environment variables.
 */
public String getMavenOpts() {
String mavenOpts = mms.getMavenOpts();
if ((mavenOpts==null) || (mavenOpts.trim().length()==0)) {
Node n = getCurrentNode();
if (n!=null) {
try {
String localMavenOpts = n.toComputer().getEnvironment().get("MAVEN_OPTS");
if ((localMavenOpts!=null) && (localMavenOpts.trim().length()>0)) {
mavenOpts = localMavenOpts;
}
} catch (IOException e) {
// Best effort: if the node's environment can't be read, silently fall back
// to the (possibly blank) project-level MAVEN_OPTS.
} catch (InterruptedException e) {
// Don't do anything - this just means the slave isn't running, so we
// don't want to use its MAVEN_OPTS anyway.
}
}
}
return envVars.expand(mavenOpts);
}
/**
 * Resolves the project's configured Maven installation for the current node and
 * environment, or returns {@code null} when the project has none configured.
 */
public MavenInstallation getMavenInstallation(TaskListener log) throws IOException, InterruptedException {
    final MavenInstallation installation = mms.getMaven();
    if (installation == null) {
        return null;
    }
    return installation.forNode(getCurrentNode(), log).forEnvironment(envVars);
}
/**
 * Resolves the project's configured JDK for the current node and environment,
 * or returns {@code null} when the project has none configured.
 */
public JDK getJava(TaskListener log) throws IOException, InterruptedException {
    final JDK configured = mms.getJDK();
    if (configured == null) {
        return null;
    }
    return configured.forNode(getCurrentNode(), log).forEnvironment(envVars);
}
/**
* Finds classworlds.jar
*/
private static final class GetClassWorldsJar implements Callable<String,IOException> {
private final String mvnHome;
private final TaskListener listener;
private GetClassWorldsJar(String mvnHome, TaskListener listener) {
this.mvnHome = mvnHome;
this.listener = listener;
}
/**
 * Runs on the node hosting the Maven installation; looks in core/boot first,
 * then boot (Maven 2.0.6+ layout), and aborts the build if no jar matches.
 */
public String call() throws IOException {
File home = new File(mvnHome);
File bootDir = new File(home, "core/boot");
File[] classworlds = bootDir.listFiles(CLASSWORLDS_FILTER);
if(classworlds==null || classworlds.length==0) {
// Maven 2.0.6 puts it to a different place
bootDir = new File(home, "boot");
classworlds = bootDir.listFiles(CLASSWORLDS_FILTER);
if(classworlds==null || classworlds.length==0) {
listener.error(Messages.MavenProcessFactory_ClassWorldsNotFound(home));
throw new RunnerAbortedException();
}
}
// If several jars match, the first listing entry wins (order is filesystem-dependent).
return classworlds[0].getAbsolutePath();
}
}
/**
 * Runs on the node to locate the remoting jar (slave.jar) by finding the jar
 * that contains {@code hudson.remoting.Launcher}.
 */
private static final class GetRemotingJar implements Callable<String,IOException> {
public String call() throws IOException {
return Which.jarFile(hudson.remoting.Launcher.class).getPath();
}
}
/**
* Returns the current {@link Node} on which we are buildling.
*/
private Node getCurrentNode() {
// NOTE(review): assumes this is called on an executor thread;
// Executor.currentExecutor() would be null otherwise and this would NPE — confirm callers.
return Executor.currentExecutor().getOwner().getNode();
}
/**
* Locates classworlds jar file.
*
* Note that Maven 3.0 changed the name to plexus-classworlds
*
* <pre>
* $ find tools/ -name "*classworlds*.jar"
* tools/maven/boot/classworlds-1.1.jar
* tools/maven-2.2.1/boot/classworlds-1.1.jar
* tools/maven-3.0-alpha-2/boot/plexus-classworlds-1.3.jar
* tools/maven-3.0-alpha-3/boot/plexus-classworlds-2.2.2.jar
* tools/maven-3.0-alpha-4/boot/plexus-classworlds-2.2.2.jar
* tools/maven-3.0-alpha-5/boot/plexus-classworlds-2.2.2.jar
* tools/maven-3.0-alpha-6/boot/plexus-classworlds-2.2.2.jar
* </pre>
*/
private static final FilenameFilter CLASSWORLDS_FILTER = new FilenameFilter() {
public boolean accept(File dir, String name) {
// Matches both classworlds-1.1.jar and plexus-classworlds-2.2.2.jar (Maven 3 naming).
return name.contains("classworlds") && name.endsWith(".jar");
}
};
/**
 * Set true to produce debug output.
 */
public static boolean debug = false;
/**
 * If not 0, launch Maven with a debugger port.
 */
public static int debugPort;
// Read once at class load; toggles profiling support via -Dhudson.maven.profile.
public static boolean profile = Boolean.getBoolean("hudson.maven.profile");
/**
 * If true, launch Maven with YJP offline profiler agent.
 */
public static boolean yjp = Boolean.getBoolean("hudson.maven.yjp");
static {
// Allow enabling the Maven JVM debugger port at startup via -Dhudson.maven.debugPort=<n>.
String port = System.getProperty("hudson.maven.debugPort");
if(port!=null)
debugPort = Integer.parseInt(port);
}
private static final Logger LOGGER = Logger.getLogger(MavenProcessFactory.class.getName());
}
|
package edu.cmu.cs.diamond.opendiamond;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.Socket;
import java.util.List;
import java.util.Set;
class Connection {
private static final int DIAMOND_PORT = 5872;
private static final int NONCE_SIZE = 16;
final private MiniRPCConnection control;
final private MiniRPCConnection blast;
final private String hostname;
/** @return the remote host this connection talks to (as given at construction). */
String getHostname() {
return hostname;
}
// all public methods must close() on IOException!
/**
 * Opens one TCP channel to a Diamond server and performs the nonce handshake:
 * writes the NONCE_SIZE-byte nonce, then reads the server's reply back into the
 * same array (so the caller reuses the server-assigned nonce for the next channel).
 *
 * @param address server hostname or IP address
 * @param nonce   in/out nonce buffer; must be exactly NONCE_SIZE bytes
 * @return the connected socket with the handshake completed
 * @throws IllegalArgumentException if the nonce buffer has the wrong size
 * @throws IOException if connecting or the handshake fails (the socket is closed first)
 */
private static Socket createOneChannel(String address, byte nonce[])
        throws IOException {
    if (nonce.length != NONCE_SIZE) {
        throw new IllegalArgumentException("nonce[] must be NONCE_SIZE ("
                + NONCE_SIZE + "), actual size " + nonce.length);
    }
    Socket socket = new Socket(address, DIAMOND_PORT);
    try {
        DataInputStream in = new DataInputStream(socket.getInputStream());
        DataOutputStream out = new DataOutputStream(socket.getOutputStream());
        // write nonce, then read the server's nonce back into the same buffer
        out.write(nonce);
        in.readFully(nonce);
        return socket;
    } catch (IOException e) {
        // FIX: previously the socket leaked when the handshake failed after connect.
        try {
            socket.close();
        } catch (IOException ignored) {
            // best effort; propagate the original handshake failure
        }
        throw e;
    }
}
/**
 * @param control  RPC connection for the control channel
 * @param blast    RPC connection for the blast (data) channel
 * @param hostname remote host, kept for error reporting
 */
Connection(MiniRPCConnection control, MiniRPCConnection blast,
String hostname) {
this.control = control;
this.blast = blast;
this.hostname = hostname;
}
/**
 * Opens the control channel and then the blast channel to the server (the
 * server's nonce from the first handshake is reused for the second), sends the
 * pre-start configuration, and returns the ready connection.
 *
 * @throws ServerException wrapping any I/O failure, after cleaning up channels
 */
static Connection createConnection(String host, List<Cookie> cookieList,
Set<String> pushAttributes, XDR_sig_and_data fspec,
List<Filter> filters) throws ServerException {
// System.out.println("connecting to " + host);
byte nonce[] = new byte[NONCE_SIZE];
MiniRPCConnection control;
MiniRPCConnection blast;
try {
// open control (if exception is thrown here, it's ok)
control = new MiniRPCConnection(createOneChannel(host, nonce));
// open data
try {
blast = new MiniRPCConnection(createOneChannel(host, nonce));
} catch (IOException e) {
try {
// close control and propagate
control.close();
} catch (IOException e2) {
// deliberate best-effort close; the original failure is what matters
}
throw e;
}
Connection conn = new Connection(control, blast, host);
conn.sendPreStart(cookieList, pushAttributes, fspec, filters);
return conn;
} catch (IOException e) {
throw new ServerException(host, e);
}
}
// TODO pipeline
/**
 * Sends the pre-search configuration over the control channel: scope cookies,
 * optional push attributes, the filter spec, and each filter's code and blob.
 * Closes this connection and rethrows on any I/O failure.
 */
private void sendPreStart(List<Cookie> cookieList,
Set<String> pushAttributes, XDR_sig_and_data fspec,
List<Filter> filters) throws IOException {
try {
// define scope
for (Cookie cookie : cookieList) {
byte[] data = XDREncoders.encodeString(cookie.getCookie());
new RPC(this, hostname, 24, data).doRPC().checkStatus();
}
// set the push attributes
if (pushAttributes != null) {
byte[] encodedAttributes = new XDR_attr_name_list(
pushAttributes).encode();
new RPC(this, hostname, 20, encodedAttributes).doRPC()
.checkStatus();
}
// set the fspec
// device_set_spec = 6
new RPC(this, hostname, 6, fspec.encode()).doRPC().checkStatus();
// set the codes and blobs
for (Filter f : filters) {
setCode(f);
setBlob(f);
}
} catch (IOException e) {
close();
throw e;
}
}
public void sendStart() throws IOException {
try {
// start search
byte encodedSearchId[] = new byte[4];
// device_start_search = 1
new RPC(this, hostname, 1, encodedSearchId).doRPC().checkStatus();
} catch (IOException e) {
close();
throw e;
}
}
private void setBlob(Filter f) throws IOException {
final byte encodedBlobSig[] = f.getEncodedBlobSig();
final byte encodedBlob[] = f.getEncodedBlob();
// System.out.println("blob sig: " + encodedBlobSig);
// device_set_blob_by_signature = 22
MiniRPCReply reply1 = new RPC(this, hostname, 22, encodedBlobSig)
.doRPC();
if (reply1.getMessage().getStatus() != RPC.DIAMOND_FCACHEMISS) {
reply1.checkStatus();
return;
}
// device_set_blob = 11
new RPC(this, hostname, 11, encodedBlob).doRPC().checkStatus();
}
private void setCode(Filter f) throws IOException {
byte code[] = f.getFilterCode().getBytes();
XDR_sig_val sig = XDR_sig_val.createSignature(code);
XDR_sig_and_data sigAndData = new XDR_sig_and_data(sig, code);
final byte[] encodedSig = sig.encode();
final byte[] encodedSigAndData = sigAndData.encode();
// device_set_obj = 16
MiniRPCReply reply1 = new RPC(this, hostname, 16, encodedSig).doRPC();
if (reply1.getMessage().getStatus() != RPC.DIAMOND_FCACHEMISS) {
reply1.checkStatus();
return;
}
// device_send_obj = 17
new RPC(this, hostname, 17, encodedSigAndData).doRPC().checkStatus();
}
void close() {
// System.out.println("closing " + toString());
try {
control.close();
} catch (IOException e) {
e.printStackTrace();
}
try {
blast.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private MiniRPCMessage receiveFrom(MiniRPCConnection c) throws IOException {
try {
return c.receive();
} catch (IOException e) {
close();
throw e;
}
}
    /**
     * Receives one message from the blast (data) channel, translating any
     * IOException into a ServerException that carries the host name.
     *
     * @throws ServerException wrapping the underlying IOException
     */
    public MiniRPCMessage receiveBlast() throws ServerException {
        try {
            return receiveFrom(blast);
        } catch (IOException e) {
            // NOTE(review): receiveFrom() already called close() before
            // rethrowing, so this close() re-closes both channels. Looks
            // harmless (close failures are only printed) — confirm intent.
            close();
            throw new ServerException(hostname, e);
        }
    }
public void sendMessageBlast(int cmd, byte data[]) throws ServerException {
try {
blast.sendMessage(cmd, data);
} catch (IOException e) {
close();
throw new ServerException(hostname, e);
}
}
public void sendControlRequest(int cmd, byte[] data) throws ServerException {
try {
control.sendRequest(cmd, data);
} catch (IOException e) {
close();
throw new ServerException(hostname, e);
}
}
public MiniRPCMessage receiveControl() throws ServerException {
try {
return control.receive();
} catch (IOException e) {
close();
throw new ServerException(hostname, e);
}
}
}
|
package Utility;
import javax.tools.*;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;
public class JavaSourceCompiler {

    // Absolute path (with trailing slash) where compiled .class files go.
    // Recomputed on every CompileJavaSource call from the working directory.
    private static String targetDir;

    // Accumulates compiler messages so getDiagnostics() can report them later.
    private DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();

    /**
     * Compiles each in-memory source buffer, writing the resulting .class
     * files to a "CompiledSources" directory (location depends on whether we
     * run from the IDE/"ObsidiCode" tree or from a Minecraft installation).
     *
     * Fixes over the previous version:
     * - The Writer argument of JavaCompiler.getTask() receives compiler
     *   *messages*, never class files. The old code opened a FileWriter on
     *   "&lt;name&gt;.class", passed it there, and closed it before task.call()
     *   even ran (so the task wrote to a closed stream and no class file was
     *   produced through it). Class-file output is now routed properly via a
     *   StandardJavaFileManager and StandardLocation.CLASS_OUTPUT.
     * - ToolProvider.getSystemJavaCompiler() returns null on a plain JRE;
     *   that case is now reported instead of throwing NullPointerException.
     *
     * @param jsb in-memory Java sources to compile
     */
    public void CompileJavaSource(JavaSourceBuffer[] jsb)
    {
        resolveTargetDirectory();

        JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
        if (compiler == null) {
            System.out.println("No system Java compiler available; cannot compile sources.");
            return;
        }

        boolean success = true;
        for (JavaSourceBuffer source : jsb) {
            try (StandardJavaFileManager fileManager =
                    compiler.getStandardFileManager(diagnostics, null, null)) {
                // Direct class-file output to the resolved target directory.
                fileManager.setLocation(StandardLocation.CLASS_OUTPUT,
                        Arrays.asList(new File(targetDir)));
                JavaCompiler.CompilationTask task = compiler.getTask(
                        null, fileManager, diagnostics, null, null,
                        Arrays.asList(source));
                try {
                    if (!task.call())
                        success = false;
                } catch (UnsupportedOperationException e) {
                    System.out.println(e.getMessage());
                }
            } catch (IOException e) {
                System.out.println("Could not create compilation output.");
            }
        }
        if (!success) {
            System.out.println("Compilation failed, should provide some errors in console.");
        }
        // For debugging!
        System.out.println(getDiagnostics());
    }

    /**
     * Resolves (and creates, if necessary) the output directory for compiled
     * sources, based on the current working directory.
     */
    private static void resolveTargetDirectory() {
        targetDir = System.getProperty("user.dir");
        if (targetDir.contains("ObsidiCode")) {
            targetDir += "/CompiledSources/";
            // Previously this branch never created the directory; do so now.
            new File(targetDir).mkdirs();
        } else if (targetDir.contains("Minecraft")) {
            File targetFolder = new File(targetDir + "/saves/CompiledSources/");
            if (!targetFolder.exists()) {
                try {
                    System.out.println("Creating directory for compiled sources");
                    // mkdirs: also creates "saves/" if missing (mkdir would not)
                    targetFolder.mkdirs();
                }
                catch (SecurityException e)
                {
                    ErrorHandling.Error("Could not create directory for compiled sources.");
                }
            }
            targetDir = targetFolder.getAbsolutePath() + "/";
        }
    }

    /**
     * Renders every diagnostic collected so far as plain text: one entry per
     * diagnostic with source, line, position and message.
     *
     * @return human-readable diagnostics; empty string if there are none
     */
    public String getDiagnostics()
    {
        StringBuilder sb = new StringBuilder();
        // Generate error-pages here, right now just writing to console
        for (Diagnostic<? extends JavaFileObject> diagnostic : diagnostics.getDiagnostics()) {
            sb.append(diagnostic.getSource()).append("\n");
            sb.append("Line ").append(diagnostic.getLineNumber());
            sb.append(", position ").append(diagnostic.getPosition());
            sb.append(": ");
            sb.append(diagnostic.getMessage(null));
            sb.append("\n\n");
        }
        return sb.toString();
    }
}
|
package com.algolia.search.saas;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.junit.runners.MethodSorters;
import com.algolia.search.saas.APIClient.LogType;
import com.algolia.search.saas.Query.QueryType;
import com.algolia.search.saas.Query.TypoTolerance;
/**
 * Integration test suite for the Algolia Java API client. These tests run
 * against the live Algolia service: credentials come from the
 * ALGOLIA_APPLICATION_ID / ALGOLIA_API_KEY environment variables and the
 * whole class is skipped (via Assume) when they are missing. Tests are
 * executed in name order (FixMethodOrder) because some of them build on
 * server-side state; the many Thread.sleep calls wait for server-side
 * propagation of writes and key changes.
 */
@RunWith(JUnit4.class)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class SimpleTest {
    // Index name with unicode/special characters on purpose, to exercise URL
    // encoding; suffixed per-Travis-job to avoid collisions (see safe_name).
    private static final String indexName = safe_name("àlgol?à-java");
    private static APIClient client;
    private static Index index;

    /**
     * Suffixes the Travis job number onto the name when running on Travis CI,
     * so parallel builds do not clobber each other's indices.
     */
    public static String safe_name(String name) {
        if (System.getenv("TRAVIS") != null) {
            String[] id = System.getenv("TRAVIS_JOB_NUMBER").split("\\.");
            return name + "_travis" + id[id.length - 1];
        }
        return name;
    }

    /**
     * Linear scan: true if any object in {@code array} has attribute
     * {@code attr} equal to {@code search}.
     */
    public static boolean isPresent(JSONArray array, String search, String attr) throws JSONException {
        boolean isPresent = false;
        for (int i = 0; i < array.length(); ++i) {
            isPresent = isPresent || array.getJSONObject(i).getString(attr).equals(search);
        }
        return isPresent;
    }

    /** Builds the shared client/index from env credentials, or skips the suite. */
    @BeforeClass
    public static void init() {
        String applicationID = System.getenv("ALGOLIA_APPLICATION_ID");
        String apiKey = System.getenv("ALGOLIA_API_KEY");
        Assume.assumeFalse("You must set environement variables ALGOLIA_APPLICATION_ID and ALGOLIA_API_KEY to run the tests.", applicationID == null || apiKey == null);
        client = new APIClient(applicationID, apiKey);
        index = client.initIndex(indexName);
    }

    /** Empties the test index before each test; failure means it doesn't exist yet. */
    @Before
    public void eachInit() {
        try {
            index.clearIndex();
        }
        catch (AlgoliaException e) {
            //Normal
        }
    }

    /** Removes the test index after the whole suite. */
    @AfterClass
    public static void dispose() {
        try {
            client.deleteIndex(indexName);
        } catch (AlgoliaException e) {
            // Not fatal
        }
    }

    @Test
    public void test01_deleteIndexIfExists() {
        try {
            client.deleteIndex(indexName);
        } catch (AlgoliaException e) {
            // not fatal
        }
    }

    // Add a single object and wait for indexing to finish.
    @Test
    public void test02_pushObject() throws AlgoliaException, JSONException {
        JSONObject obj = index.addObject(new JSONObject().put("i", 42).put("s", "foo").put("b", true));
        index.waitTask(obj.getString("taskID"));
    }

    // Exact search with typo tolerance disabled must find the one object.
    @Test
    public void test03_search() throws AlgoliaException, JSONException {
        JSONObject obj = index.addObject(new JSONObject().put("i", 42).put("s", "foo").put("b", true));
        index.waitTask(obj.getString("taskID"));
        JSONObject res = index.search(new Query("foo").setTypoTolerance(TypoTolerance.TYPO_FALSE));
        assertEquals(1, res.getJSONArray("hits").length());
        assertEquals("foo", res.getJSONArray("hits").getJSONObject(0).getString("s"));
        assertEquals(42, res.getJSONArray("hits").getJSONObject(0).getLong("i"));
        assertEquals(true, res.getJSONArray("hits").getJSONObject(0).getBoolean("b"));
    }

    // saveObject replaces an existing object by its objectID.
    @Test
    public void test04_saveObject() throws AlgoliaException, JSONException {
        JSONObject obj = index.addObject(new JSONObject().put("i", 42).put("s", "foo").put("b", true));
        index.waitTask(obj.getString("taskID"));
        JSONObject res = index.search(new Query("foo"));
        assertEquals(1, res.getJSONArray("hits").length());
        res = index.saveObject(new JSONObject().put("s", "bar"), res.getJSONArray("hits").getJSONObject(0).getString("objectID"));
        index.waitTask(res.getString("taskID"));
    }

    // After a save, the old value is gone and the new one is searchable.
    @Test
    public void test05_searchUpdated() throws AlgoliaException, JSONException {
        JSONObject obj = index.addObject(new JSONObject().put("i", 42).put("s", "foo").put("b", true));
        index.waitTask(obj.getString("taskID"));
        JSONObject res = index.search(new Query("foo"));
        assertEquals(1, res.getJSONArray("hits").length());
        res = index.saveObject(new JSONObject().put("s", "bar"), res.getJSONArray("hits").getJSONObject(0).getString("objectID"));
        index.waitTask(res.getString("taskID"));
        res = index.search(new Query("foo"));
        assertEquals(0, res.getJSONArray("hits").length());
        res = index.search(new Query("bar"));
        assertEquals(1, res.getJSONArray("hits").length());
        assertEquals("bar", res.getJSONArray("hits").getJSONObject(0).getString("s"));
    }

    // Both the empty query and "*" return every object.
    @Test
    public void test06_searchAll() throws AlgoliaException, JSONException {
        JSONObject obj = index.addObject(new JSONObject().put("i", 42).put("s", "foo").put("b", true));
        index.waitTask(obj.getString("taskID"));
        JSONObject res = index.search(new Query("foo"));
        assertEquals(1, res.getJSONArray("hits").length());
        res = index.saveObject(new JSONObject().put("s", "bar"), res.getJSONArray("hits").getJSONObject(0).getString("objectID"));
        index.waitTask(res.getString("taskID"));
        res = index.search(new Query(""));
        assertEquals(1, res.getJSONArray("hits").length());
        res = index.search(new Query("*"));
        assertEquals(1, res.getJSONArray("hits").length());
    }

    // "jimie" (misspelled) relies on the service's default typo tolerance.
    @Test
    public void test07_addObject() throws AlgoliaException, JSONException {
        assertEquals(indexName, index.getIndexName());
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"));
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query("jimie"));
        assertEquals(1, res.getInt("nbHits"));
    }

    // addObject with an explicit objectID containing characters needing escaping.
    @Test
    public void test08_saveObject() throws AlgoliaException, JSONException {
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"), "a/go/?à");
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query("jimie"));
        assertEquals(1, res.getInt("nbHits"));
    }

    // Partial update touches one attribute ("firtname" — sic, present in the
    // original data set) and must leave the rest searchable.
    @Test
    public void test09_partialUpdateObject() throws AlgoliaException, JSONException {
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"), "a/go/?à");
        index.waitTask(task.getString("taskID"));
        task = index.partialUpdateObject(new JSONObject()
                .put("firtname", "Roger"), "a/go/?à");
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query("jimie"));
        assertEquals(1, res.getInt("nbHits"));
    }

    @Test
    public void test10_getObject() throws AlgoliaException, JSONException {
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"), "a/go/?à");
        index.waitTask(task.getString("taskID"));
        JSONObject object = index.getObject("a/go/?à");
        assertEquals("Jimmie", object.getString("firstname"));
    }

    // getObject restricted to a subset of attributes.
    @Test
    public void test11_getObjectWithAttr() throws AlgoliaException, JSONException {
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"), "a/go/?à");
        index.waitTask(task.getString("taskID"));
        JSONObject object = index.getObject("a/go/?à", Arrays.asList("lastname", "firstname"));
        assertEquals("Barninger", object.getString("lastname"));
    }

    // NOTE(review): the object is added WITHOUT an explicit objectID, so
    // deleteObject("a/go/?à") targets an ID this test never created, and the
    // assertion checks the search result captured BEFORE the delete. Looks
    // like it only verifies deleteObject doesn't throw — confirm intent.
    @Test
    public void test12_deleteObject() throws AlgoliaException, JSONException {
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"));
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query("jimie"));
        task = index.deleteObject("a/go/?à");
        assertEquals(1, res.getInt("nbHits"));
    }

    // Round-trips a settings change.
    // NOTE(review): new Long(...).toString() boxes via a deprecated
    // constructor; Long.toString(task.getLong("taskID")) would avoid it.
    @Test
    public void test13_settings() throws AlgoliaException, JSONException {
        JSONObject task = index.setSettings(new JSONObject()
                .put("attributesToRetrieve", Arrays.asList("firstname")));
        index.waitTask(new Long(task.getLong("taskID")).toString());
        JSONObject settings = index.getSettings();
        assertEquals("firstname", settings.getJSONArray("attributesToRetrieve").getString(0));
    }

    // listIndexes must reflect creation and (after a propagation delay) deletion.
    @Test
    public void test14_index() throws AlgoliaException, JSONException, InterruptedException {
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"), "a/go/?à");
        index.waitTask(task.getString("taskID"));
        JSONObject res = client.listIndexes();
        assertTrue(isPresent(res.getJSONArray("items"), indexName, "name"));
        client.deleteIndex(indexName);
        Thread.sleep(5000);
        JSONObject resAfter = client.listIndexes();
        assertFalse(isPresent(resAfter.getJSONArray("items"), indexName, "name"));
    }

    // Batch add via List<JSONObject>.
    @Test
    public void test15_addObjects() throws JSONException, AlgoliaException {
        List<JSONObject> array = new ArrayList<JSONObject>();
        array.add(new JSONObject().put("firstname", "Jimmie").put("lastname", "Barninger"));
        array.add(new JSONObject().put("firstname", "Warren").put("lastname", "Speach"));
        JSONObject task = index.addObjects(array);
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query(""));
        assertEquals(2, res.getInt("nbHits"));
    }

    // Batch delete by objectID list.
    @Test
    public void test15_deleteObjects() throws JSONException, AlgoliaException {
        List<JSONObject> array = new ArrayList<JSONObject>();
        array.add(new JSONObject().put("firstname", "Jimmie").put("lastname", "Barninger").put("objectID", "à/go/?à"));
        array.add(new JSONObject().put("firstname", "Warren").put("lastname", "Speach").put("objectID", "à/go/?à2"));
        JSONObject task = index.addObjects(array);
        index.waitTask(task.getString("taskID"));
        List<String> deleted = new ArrayList<String>();
        deleted.add("à/go/?à");
        deleted.add("à/go/?à2");
        task = index.deleteObjects(deleted);
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query(""));
        assertEquals(0, res.getInt("nbHits"));
    }

    // Batch add via JSONArray overload.
    @Test
    public void test15_addObjectsList() throws JSONException, AlgoliaException {
        JSONArray array = new JSONArray();
        array.put(new JSONObject().put("firstname", "Jimmie").put("lastname", "Barninger"));
        array.put(new JSONObject().put("firstname", "Warren").put("lastname", "Speach"));
        JSONObject task = index.addObjects(array);
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query(""));
        assertEquals(2, res.getInt("nbHits"));
    }

    // Batch save (objects carry their own objectID).
    @Test
    public void test16_saveObjects() throws JSONException, AlgoliaException {
        List<JSONObject> array = new ArrayList<JSONObject>();
        array.add(new JSONObject().put("firstname", "Jimmie").put("lastname", "Barninger").put("objectID", "a/go/?à"));
        array.add(new JSONObject().put("firstname", "Warren").put("lastname", "Speach").put("objectID", "a/go/ià"));
        JSONObject task = index.saveObjects(array);
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query(""));
        assertEquals(2, res.getInt("nbHits"));
    }

    // Batch partial update via List<JSONObject>.
    @Test
    public void test17_partialUpdateObjects() throws JSONException, AlgoliaException {
        List<JSONObject> array = new ArrayList<JSONObject>();
        array.add(new JSONObject().put("firstname", "Jimmie").put("lastname", "Barninger").put("objectID", "a/go/?à"));
        array.add(new JSONObject().put("firstname", "Warren").put("lastname", "Speach").put("objectID", "a/go/ià"));
        JSONObject task = index.saveObjects(array);
        index.waitTask(task.getString("taskID"));
        array = new ArrayList<JSONObject>();
        array.add(new JSONObject().put("firstname", "Roger").put("objectID", "a/go/?à"));
        array.add(new JSONObject().put("firstname", "Robert").put("objectID", "a/go/ià"));
        task = index.partialUpdateObjects(array);
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query("Ro"));
        assertEquals(2, res.getInt("nbHits"));
    }

    // Batch partial update via JSONArray overload.
    @Test
    public void test17_partialUpdateObjectsList() throws JSONException, AlgoliaException {
        JSONArray array = new JSONArray();
        array.put(new JSONObject().put("firstname", "Jimmie").put("lastname", "Barninger").put("objectID", "a/go/?à"));
        array.put(new JSONObject().put("firstname", "Warren").put("lastname", "Speach").put("objectID", "a/go/ià"));
        JSONObject task = index.saveObjects(array);
        index.waitTask(task.getString("taskID"));
        array = new JSONArray();
        array.put(new JSONObject().put("firstname", "Roger").put("objectID", "a/go/?à"));
        array.put(new JSONObject().put("firstname", "Robert").put("objectID", "a/go/ià"));
        task = index.partialUpdateObjects(array);
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query("Ro"));
        assertEquals(2, res.getInt("nbHits"));
    }

    // Full lifecycle of an index-scoped API key: add, list, get, update,
    // delete. The sleeps wait for server-side key propagation.
    @Test
    public void test18_user_key_index() throws AlgoliaException, JSONException {
        JSONObject newKey = index.addUserKey(Arrays.asList("search"));
        try { Thread.sleep(5000); } catch (Exception e) {}
        assertTrue(!newKey.getString("key").equals(""));
        JSONObject res = index.listUserKeys();
        assertTrue(isPresent(res.getJSONArray("keys"), newKey.getString("key"), "value"));
        JSONObject getKey = index.getUserKeyACL(newKey.getString("key"));
        assertEquals(newKey.getString("key"), getKey.getString("value"));
        index.updateUserKey(newKey.getString("key"), Arrays.asList("addObject"));
        try { Thread.sleep(5000); } catch (Exception e) {}
        getKey = index.getUserKeyACL(newKey.getString("key"));
        assertEquals(getKey.getJSONArray("acl").get(0), "addObject");
        index.deleteUserKey(getKey.getString("value"));
        try { Thread.sleep(5000); } catch (Exception e) {}
        JSONObject resAfter = index.listUserKeys();
        assertTrue(!isPresent(resAfter.getJSONArray("keys"), newKey.getString("key"), "value"));
    }

    // Same lifecycle for an application-level API key.
    @Test
    public void test19_user_key() throws AlgoliaException, JSONException {
        JSONObject newKey = client.addUserKey(Arrays.asList("search"));
        try { Thread.sleep(5000); } catch (Exception e) {}
        assertTrue(!newKey.getString("key").equals(""));
        JSONObject res = client.listUserKeys();
        assertTrue(isPresent(res.getJSONArray("keys"), newKey.getString("key"), "value"));
        JSONObject getKey = client.getUserKeyACL(newKey.getString("key"));
        assertEquals(newKey.getString("key"), getKey.getString("value"));
        client.updateUserKey(newKey.getString("key"), Arrays.asList("addObject"));
        try { Thread.sleep(5000); } catch (Exception e) {}
        getKey = client.getUserKeyACL(newKey.getString("key"));
        assertEquals(getKey.getJSONArray("acl").get(0), "addObject");
        client.deleteUserKey(getKey.getString("value"));
        try { Thread.sleep(5000); } catch (Exception e) {}
        JSONObject resAfter = client.listUserKeys();
        assertTrue(!isPresent(resAfter.getJSONArray("keys"), newKey.getString("key"), "value"));
    }

    // Moves the index; also exercises most Query setters, then checks the
    // original index no longer answers.
    @Test
    public void test20_moveIndex() throws AlgoliaException, JSONException {
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"));
        index.waitTask(task.getString("taskID"));
        task = client.moveIndex(indexName, indexName + "2");
        Index newIndex = client.initIndex(indexName + "2");
        newIndex.waitTask(task.getString("taskID"));
        Query query = new Query();
        query.setQueryType(QueryType.PREFIX_ALL);
        query.setQueryString("jimye");
        query.setAttributesToRetrieve(Arrays.asList("firstname"));
        query.setAttributesToHighlight(new ArrayList<String>());
        query.setAttributesToSnippet(new ArrayList<String>());
        query.enableDistinct(false);
        query.setMinWordSizeToAllowOneTypo(1);
        query.setMinWordSizeToAllowTwoTypos(2);
        query.getRankingInfo(true);
        query.setPage(0);
        query.setHitsPerPage(1);
        assertTrue(!query.getQueryString().equals(""));
        JSONObject res = newIndex.search(query);
        assertEquals(1, res.getInt("nbHits"));
        try {
            index.search(new Query("jimie"));
            assertTrue(false);
        } catch (AlgoliaException e) {
            assertTrue(true);
        }
        client.deleteIndex(indexName + "2");
    }

    // Copy keeps both source and destination searchable.
    @Test
    public void test21_copyIndex() throws AlgoliaException, JSONException {
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"));
        index.waitTask(task.getString("taskID"));
        task = client.copyIndex(indexName, indexName + "2");
        Index newIndex = client.initIndex(indexName + "2");
        newIndex.waitTask(task.getString("taskID"));
        JSONObject res = newIndex.search(new Query("jimie"));
        assertEquals(1, res.getInt("nbHits"));
        res = index.search(new Query("jimie"));
        assertEquals(1, res.getInt("nbHits"));
        client.deleteIndex(indexName + "2");
    }

    // browse(page) and browse(page, hitsPerPage) overloads.
    @Test
    public void test22_browse() throws AlgoliaException, JSONException {
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"));
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.browse(0);
        assertEquals(1, res.getInt("nbHits"));
        res = index.browse(0, 1);
        assertEquals(1, res.getInt("nbHits"));
    }

    // All getLogs overloads return at least the requested entries.
    @Test
    public void test23_logs() throws AlgoliaException, JSONException {
        JSONObject res = client.getLogs();
        assertTrue(res.getJSONArray("logs").length() > 0);
        res = client.getLogs(0, 1);
        assertTrue(res.getJSONArray("logs").length() == 1);
        res = client.getLogs(0, 1, false);
        assertTrue(res.getJSONArray("logs").length() == 1);
        res = client.getLogs(0, 1, LogType.LOG_ALL);
        assertTrue(res.getJSONArray("logs").length() == 1);
    }

    // Constructor argument validation: null application ID must throw.
    @Test
    public void test24_EmptyAPPID() {
        try {
            new APIClient(null, "algolia");
            assertTrue(false);
        }
        catch (RuntimeException e){
            assertTrue(true);
        }
    }

    // Constructor argument validation: null API key must throw.
    @Test
    public void test25_EmptyAPPKEY() {
        try {
            new APIClient("algolia", null);
            assertTrue(false);
        }
        catch (RuntimeException e){
            assertTrue(true);
        }
    }

    // Constructor argument validation: empty host list must throw.
    @Test
    public void test26_EmptyHost() {
        try {
            new APIClient("algolia", "algolia", new ArrayList<String>());
            assertTrue(false);
        }
        catch (RuntimeException e){
            assertTrue(true);
        }
    }

    // Requests still work with the rate-limit-forward header disabled.
    @Test
    public void test27_headerDisableRateLimit() throws AlgoliaException, JSONException {
        client.disableRateLimitForward();
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"));
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query());
        assertEquals(1, res.getInt("nbHits"));
    }

    // App-level key with validity/rate-limit/indexing limits.
    @Test
    public void test29_user_keyLimit() throws AlgoliaException, JSONException {
        JSONObject newKey = client.addUserKey(Arrays.asList("search"), 0, 2, 2);
        try { Thread.sleep(5000); } catch (Exception e) {}
        assertTrue(!newKey.getString("key").equals(""));
        JSONObject res = client.listUserKeys();
        assertTrue(isPresent(res.getJSONArray("keys"), newKey.getString("key"), "value"));
        index.deleteUserKey(newKey.getString("key"));
    }

    // Index-level key with validity/rate-limit/indexing limits.
    @Test
    public void test30_user_key_indexLimit() throws AlgoliaException, JSONException {
        JSONObject newKey = index.addUserKey(Arrays.asList("search"), 0, 2, 2);
        try { Thread.sleep(5000); } catch (Exception e) {}
        assertTrue(!newKey.getString("key").equals(""));
        JSONObject res = index.listUserKeys();
        assertTrue(isPresent(res.getJSONArray("keys"), newKey.getString("key"), "value"));
        index.deleteUserKey(newKey.getString("key"));
    }

    // Bogus credentials must surface as AlgoliaException, not hang or NPE.
    @Test
    public void test31_InvalidKey() {
        try {
            APIClient client = new APIClient("unreach", "test");
            client.listIndexes();
            assertTrue(false);
        }
        catch (AlgoliaException e) {
            assertTrue(true);
        }
    }

    // Empty objectID is rejected client- or server-side.
    @Test
    public void test32_InvalidObjectID() {
        try {
            index.deleteObject("");
            assertTrue(false);
        }
        catch (AlgoliaException e) {
            assertTrue(true);
        }
    }

    // Custom batch API: a single deleteObject action.
    @Test
    public void test33_customBatch() throws AlgoliaException, JSONException {
        assertEquals(indexName, index.getIndexName());
        JSONObject task = index.addObject(new JSONObject()
                .put("firstname", "Jimmie")
                .put("lastname", "Barninger")
                .put("followers", 93)
                .put("company", "California Paint"));
        index.waitTask(task.getString("taskID"));
        JSONObject res = index.search(new Query("jimie"));
        assertEquals(1, res.getInt("nbHits"));
        JSONArray actions = new JSONArray();
        JSONObject action = new JSONObject();
        action.put("action", "deleteObject");
        action.put("objectID", "a/go/?à");
        actions.put(action);
        task = index.batch(actions);
        index.waitTask(task.getString("taskID"));
    }

    // Pure local computation: secured API key = HMAC of key + query params.
    @Test
    public void test34_securedApiKeys() throws InvalidKeyException, NoSuchAlgorithmException {
        assertEquals("1fd74b206c64fb49fdcd7a5f3004356cd3bdc9d9aba8733656443e64daafc417", APIClient.hmac("my_api_key", "(public,user1)"));
        String key = client.generateSecuredApiKey("my_api_key", "(public,user1)");
        assertEquals(key, APIClient.hmac("my_api_key", "(public,user1)"));
        key = client.generateSecuredApiKey("my_api_key", "(public,user1)", "" + 42);
        assertEquals(key, APIClient.hmac("my_api_key", "(public,user1)42"));
    }

    // multipleQueries returns one result set per submitted IndexQuery.
    @Test
    public void test34_multipleQueries() throws AlgoliaException, JSONException {
        JSONObject obj = index.addObject(new JSONObject().put("i", 42).put("s", "foo").put("b", true));
        index.waitTask(obj.getString("taskID"));
        List<APIClient.IndexQuery> queries = new ArrayList<APIClient.IndexQuery>();
        queries.add(new APIClient.IndexQuery(safe_name("àlgol?à-java"), new Query("")));
        JSONObject res = client.multipleQueries(queries);
        assertEquals(1, res.getJSONArray("results").length());
        assertEquals(1, res.getJSONArray("results").getJSONObject(0).getJSONArray("hits").length());
        assertEquals("foo", res.getJSONArray("results").getJSONObject(0).getJSONArray("hits").getJSONObject(0).getString("s"));
        assertEquals(42, res.getJSONArray("results").getJSONObject(0).getJSONArray("hits").getJSONObject(0).getLong("i"));
        assertEquals(true, res.getJSONArray("results").getJSONObject(0).getJSONArray("hits").getJSONObject(0).getBoolean("b"));
    }

    // getObjects returns results in the order of the requested IDs.
    @Test
    public void test35_getObjects() throws AlgoliaException, JSONException {
        JSONObject task = index.addObjects(new JSONArray().put(new JSONObject()
                .put("name", "Los Angeles").put("objectID", "1")).put(new JSONObject()
                .put("name", "San Francisco").put("objectID", "2")));
        index.waitTask(task.getString("taskID"));
        List<String> objectIDs = new ArrayList<String>();
        objectIDs.add("1");
        objectIDs.add("2");
        JSONObject object = index.getObjects(objectIDs);
        assertEquals("Los Angeles", object.getJSONArray("results").getJSONObject(0).getString("name"));
        assertEquals("San Francisco", object.getJSONArray("results").getJSONObject(1).getString("name"));
    }

    // deleteByQuery removes every object matching "San", leaving one.
    @Test
    public void test36_deleteByQuery() throws JSONException, AlgoliaException {
        JSONObject task = index.addObjects(new JSONArray().put(new JSONObject()
                .put("name", "Washington"))
                .put(new JSONObject().put("name", "San Francisco"))
                .put(new JSONObject().put("name", "San Jose")));
        index.waitTask(task.getString("taskID"));
        index.deleteByQuery(new Query("San"));
        JSONObject res = index.search(new Query(""));
        assertEquals(1, res.getInt("nbHits"));
    }

    // Disjunctive faceting: facet counts must reflect OR semantics within the
    // disjunctive facets ("stars", "facilities") while "city" stays conjunctive.
    @Test
    public void test37_disjunctiveFaceting() throws AlgoliaException, JSONException {
        index.setSettings(new JSONObject("{\"attributesForFaceting\":[\"city\", \"stars\", \"facilities\"]}"));
        JSONObject task = index.addObjects(new JSONArray()
                .put(new JSONObject("{\"name\":\"Hotel A\", \"stars\":\"*\", \"facilities\":[\"wifi\", \"bath\", \"spa\"], \"city\":\"Paris\"}"))
                .put(new JSONObject("{\"name\":\"Hotel B\", \"stars\":\"*\", \"facilities\":[\"wifi\"], \"city\":\"Paris\"}"))
                .put(new JSONObject("{\"name\":\"Hotel C\", \"stars\":\"**\", \"facilities\":[\"bath\"], \"city\":\"San Fancisco\"}"))
                .put(new JSONObject("{\"name\":\"Hotel D\", \"stars\":\"****\", \"facilities\":[\"spa\"], \"city\":\"Paris\"}"))
                .put(new JSONObject("{\"name\":\"Hotel E\", \"stars\":\"****\", \"facilities\":[\"spa\"], \"city\":\"New York\"}")));
        index.waitTask(task.getString("taskID"));
        HashMap<String, List<String>> refinements = new HashMap<String, List<String>>();
        List<String> disjunctiveFacets = new ArrayList<String>();
        List<String> facets = new ArrayList<String>();
        facets.add("city");
        disjunctiveFacets.add("stars");
        disjunctiveFacets.add("facilities");
        JSONObject answer = index.searchDisjunctiveFaceting(new Query("h").setFacets(facets), disjunctiveFacets);
        assertEquals(5, answer.getInt("nbHits"));
        assertEquals(1, answer.getJSONObject("facets").length());
        assertEquals(2, answer.getJSONObject("disjunctiveFacets").length());
        ArrayList<String> refineValue = new ArrayList<String>();
        refineValue.add("*");
        refinements.put("stars", refineValue);
        answer = index.searchDisjunctiveFaceting(new Query("h").setFacets(facets), disjunctiveFacets, refinements);
        assertEquals(2, answer.getInt("nbHits"));
        assertEquals(1, answer.getJSONObject("facets").length());
        assertEquals(2, answer.getJSONObject("disjunctiveFacets").length());
        assertEquals(2, answer.getJSONObject("disjunctiveFacets").getJSONObject("stars").getInt("*"));
        assertEquals(1, answer.getJSONObject("disjunctiveFacets").getJSONObject("stars").getInt("**"));
        assertEquals(2, answer.getJSONObject("disjunctiveFacets").getJSONObject("stars").getInt("****"));
        refineValue = new ArrayList<String>();
        refineValue.add("Paris");
        refinements.put("city", refineValue);
        answer = index.searchDisjunctiveFaceting(new Query("h").setFacets(facets), disjunctiveFacets, refinements);
        assertEquals(2, answer.getInt("nbHits"));
        assertEquals(1, answer.getJSONObject("facets").length());
        assertEquals(2, answer.getJSONObject("disjunctiveFacets").length());
        assertEquals(2, answer.getJSONObject("disjunctiveFacets").getJSONObject("stars").getInt("*"));
        assertEquals(1, answer.getJSONObject("disjunctiveFacets").getJSONObject("stars").getInt("****"));
        refineValue = new ArrayList<String>();
        refineValue.add("*");
        refineValue.add("****");
        refinements.put("stars", refineValue);
        answer = index.searchDisjunctiveFaceting(new Query("h").setFacets(facets), disjunctiveFacets, refinements);
        assertEquals(3, answer.getInt("nbHits"));
        assertEquals(1, answer.getJSONObject("facets").length());
        assertEquals(2, answer.getJSONObject("disjunctiveFacets").length());
        assertEquals(2, answer.getJSONObject("disjunctiveFacets").getJSONObject("stars").getInt("*"));
        assertEquals(1, answer.getJSONObject("disjunctiveFacets").getJSONObject("stars").getInt("****"));
    }

    // Timing-based keep-alive check: with a fresh client, the first (cold)
    // query must be noticeably slower than the average of subsequent warm
    // queries. NOTE(review): wall-clock timing assertions like this can be
    // flaky on loaded CI machines — confirm the 2x threshold is acceptable.
    @Test
    public void test38_keepAlive() throws AlgoliaException, JSONException {
        JSONObject task = index.addObjects(new JSONArray().put(new JSONObject()
                .put("name", "Los Angeles").put("objectID", "1")).put(new JSONObject()
                .put("name", "San Francisco").put("objectID", "2")));
        index.waitTask(task.getString("taskID"));
        try {
            Thread.sleep(TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS));
        } catch (InterruptedException e) {
        }
        // Redefine a client to break the current keep alive
        String applicationID = System.getenv("ALGOLIA_APPLICATION_ID");
        String apiKey = System.getenv("ALGOLIA_API_KEY");
        client = new APIClient(applicationID, apiKey);
        index = client.initIndex(indexName);
        double firstDSNQuery = 0;
        double avgDSNQuery = 0;
        long current = System.currentTimeMillis();
        index.search(new Query());
        firstDSNQuery = System.currentTimeMillis() - current;
        int upperBound = 10;
        for (int i = 0; i < upperBound; ++i) {
            current = System.currentTimeMillis();
            index.search(new Query());
            avgDSNQuery += System.currentTimeMillis() - current;
        }
        avgDSNQuery /= upperBound;
        assertTrue(2.0 < firstDSNQuery / avgDSNQuery);
    }
}
|
package com.github.enanomapper;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.StringReader;
import java.util.Iterator;
import java.util.Set;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassAxiom;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.parameters.Imports;
import org.semanticweb.owlapi.search.Searcher;
/**
 * Unit tests for {@link Slimmer}: loading OWL ontologies and keeping/removing
 * parts of the class tree according to {@link Configuration} instructions.
 *
 * All tests load their input ontology (bfo-1.1.owl, uo.owl, or bao_core.owl)
 * from the test classpath. Instruction syntax (as exercised here): lines start
 * with '+' (keep) or '-' (remove), optionally followed by 'U' (appears to
 * include superclasses) or 'D' (appears to include subclasses), then ':' and
 * the class IRI; '+(newSuper):iri' additionally introduces a new superclass.
 * TODO(review): confirm this syntax description against the Configuration class.
 */
public class SlimmerTest {
// Loading an ontology from the classpath yields a non-null, non-empty ontology.
@Test
public void testLoading() throws OWLOntologyCreationException {
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertNotSame(0, ontology.getAxiomCount());
}
// '+U' keeps the named class and (presumably) its superclasses: 3 classes remain.
@Test
public void testParsingUp() throws Exception {
String test = "+U:http://www.ifomis.org/bfo/1.1/snap#DependentContinuant";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(3, ontology.getClassesInSignature().size());
}
// A bare '+' keeps only the named class itself.
@Test
public void testParsingSingle() throws Exception {
String test = "+:http://www.ifomis.org/bfo/1.1/snap#DependentContinuant";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(1, ontology.getClassesInSignature().size());
}
// '+D' keeps the named class and (presumably) its subclasses: 4 classes remain.
@Test
public void testParsingDown() throws Exception {
String test = "+D:http://www.ifomis.org/bfo/1.1/snap#MaterialEntity";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(4, ontology.getClassesInSignature().size());
}
// '+D' on the root Entity class keeps the whole hierarchy (all 39 classes).
@Test
public void testKeepAll() throws Exception {
String test = "+D:http://www.ifomis.org/bfo/1.1#Entity";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(39, ontology.getClassesInSignature().size());
}
// Regression test for issue #19.
// NOTE(review): the two configuration string literals below are truncated in
// this copy of the file (unterminated strings) — restore them from the
// upstream repository before compiling; the assertions expect 3 saved tree
// parts and 3 remaining classes.
@Test
public void bug19() throws Exception {
String test = "+(http://www.ifomis.org/bfo/1.1
+ "+(http://www.ifomis.org/bfo/1.1
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
assertEquals(3, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(3, ontology.getClassesInSignature().size());
}
// '+D' on a leaf class keeps only that class (it has no subclasses).
@Test
public void testParsingDownLeave() throws Exception {
String test = "+D:http://www.ifomis.org/bfo/1.1/snap#FiatObjectPart";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(1, ontology.getClassesInSignature().size());
}
// Keeping and then removing the same '-U' subtree cancels out: no classes remain.
@Test
public void testDeleteUp() throws Exception {
String test = "+U:http://www.ifomis.org/bfo/1.1/snap#DependentContinuant\n"
+ "-U:http://www.ifomis.org/bfo/1.1/snap#DependentContinuant";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
Set<Instruction> irisToRemove = conf.getTreePartsToRemove();
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
slimmer.removeAll(irisToRemove);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(0, ontology.getClassesInSignature().size());
}
// Keep the full hierarchy but remove one leaf: 39 - 1 = 38 classes remain.
@Test
public void testKeepAllButOne() throws Exception {
String test = "+D:http://www.ifomis.org/bfo/1.1#Entity\n"
+ "-D:http://www.ifomis.org/bfo/1.1/snap#FiatObjectPart\n";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
Set<Instruction> irisToRemove = conf.getTreePartsToRemove();
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
slimmer.removeAll(irisToRemove);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(38, ontology.getClassesInSignature().size());
}
// Keep MaterialEntity's subtree (4 classes) then remove one leaf: 3 remain.
@Test
public void testDeleteDown() throws Exception {
String test = "+D:http://www.ifomis.org/bfo/1.1/snap#MaterialEntity\n"
+ "-D:http://www.ifomis.org/bfo/1.1/snap#FiatObjectPart";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
Set<Instruction> irisToRemove = conf.getTreePartsToRemove();
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
slimmer.removeAll(irisToRemove);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(3, ontology.getClassesInSignature().size());
}
// Same as testDeleteDown, but the instruction line carries a trailing comment
// after the IRI, which the parser must tolerate.
@Test
public void testDeleteDownWithComment() throws Exception {
String test = "+D:http://www.ifomis.org/bfo/1.1/snap#MaterialEntity Comment\n"
+ "-D:http://www.ifomis.org/bfo/1.1/snap#FiatObjectPart";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
Set<Instruction> irisToRemove = conf.getTreePartsToRemove();
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
slimmer.removeAll(irisToRemove);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(3, ontology.getClassesInSignature().size());
}
// '+D(newSuper):iri' keeps a subtree and attaches it under a new superclass;
// the kept class ending in "Object" must end up with exactly one SubClassOf axiom.
// NOTE(review): the configuration string literal below is truncated in this
// copy of the file (unterminated string) — restore it from upstream.
@Test
public void testParsingWithNewSuperClass() throws Exception {
String test = "+D(http://www.ifomis.org/bfo/1.1/snap
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
Iterator<Instruction> instructions = irisToSave.iterator();
Instruction instruction = instructions.next();
// The Set iteration order is unspecified; pick the instruction for ...Object.
if (!instruction.getUriString().endsWith("Object")) instruction = instructions.next();
String baseClass = instruction.getUriString();
assertEquals(2, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(2, ontology.getClassesInSignature().size());
Set<OWLEntity> entities = ontology.getEntitiesInSignature(IRI.create(baseClass));
assertEquals(1, entities.size());
OWLEntity entity = entities.iterator().next();
assertTrue(entity.isOWLClass());
OWLClass owlClass = entity.asOWLClass();
Set<OWLClassAxiom> axioms = ontology.getAxioms(owlClass, Imports.INCLUDED);
assertEquals(1, axioms.size());
assertEquals("SubClassOf", axioms.iterator().next().getAxiomType().getName());
}
// A kept class re-parented under another kept class gets a SubClassOf axiom.
// NOTE(review): the second configuration line below is truncated in this copy
// of the file (unterminated string) — restore it from upstream; the test
// expects 3 saved tree parts and 2 remaining classes.
@Test
public void testMakeNewSubclassProperty() throws Exception {
String test = "+:http://www.ifomis.org/bfo/1.1#Entity\n"
+ "+(http://www.ifomis.org/bfo/1.1
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
String baseClass = null;
// Locate the MaterialEntity instruction regardless of Set iteration order.
for (Instruction instruction : irisToSave) {
if (instruction.getUriString().endsWith("#MaterialEntity"))
baseClass = instruction.getUriString();
}
assertEquals(3, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(2, ontology.getClassesInSignature().size());
Set<OWLEntity> entities = ontology.getEntitiesInSignature(IRI.create(baseClass));
assertEquals(1, entities.size());
OWLEntity entity = entities.iterator().next();
assertTrue(entity.isOWLClass());
OWLClass owlClass = entity.asOWLClass();
Set<OWLClassAxiom> axioms = ontology.getAxioms(owlClass, Imports.INCLUDED);
assertEquals(1, axioms.size());
assertEquals("SubClassOf", axioms.iterator().next().getAxiomType().getName());
}
// The new superclass may live in a different ontology (here: a CHEBI IRI);
// the kept class must get exactly one superclass via SubClassOf.
@Test
public void testMakeNewSuperClassFromOtherOntology() throws Exception {
String test = "+(http://purl.obolibrary.org/obo/CHEBI_23367):http://www.ifomis.org/bfo/1.1/snap#MaterialEntity";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
Iterator<Instruction> iterator = irisToSave.iterator();
Instruction instruction = iterator.next();
// Scan for the MaterialEntity instruction; Set iteration order is unspecified.
while (!instruction.getUriString().equals("http://www.ifomis.org/bfo/1.1/snap#MaterialEntity") && iterator.hasNext())
instruction = iterator.next();
String baseClass = instruction.getUriString();
assertEquals(2, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bfo-1.1.owl");
Slimmer slimmer = new Slimmer(stream);
slimmer.removeAllExcept(irisToSave);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(2, ontology.getClassesInSignature().size());
Set<OWLEntity> entities = ontology.getEntitiesInSignature(IRI.create(baseClass));
assertEquals(1, entities.size());
Iterator<OWLEntity> entityIter = entities.iterator();
OWLEntity entity = entityIter.next();
assertTrue(entity.isOWLClass());
OWLClass owlClass = entity.asOWLClass();
assertEquals(1, Searcher.sup(ontology.subClassAxiomsForSubClass(owlClass)).count());
Set<OWLClassAxiom> axioms = ontology.getAxioms(owlClass, Imports.INCLUDED);
assertEquals("SubClassOf", axioms.iterator().next().getAxiomType().getName());
}
// Keeping an explicitly listed property prevents its declaration from being removed.
@Test
public void testNotRemoveDeclaredProperties() throws Exception {
String test = "+:http://purl.obolibrary.org/obo/uo#is_unit_of";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
assertNotNull(irisToSave);
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("uo.owl");
Slimmer slimmer = new Slimmer(stream);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(3203, ontology.getAxiomCount());
// Slimming prunes the ontology from 3203 axioms down to 68 (one more than
// in testRemoveDeclaredProperties, where nothing is kept).
slimmer.removeAllExcept(irisToSave);
ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(68, ontology.getAxiomCount());
}
// With an empty configuration, slimming removes everything non-essential,
// leaving 67 axioms (the baseline for testNotRemoveDeclaredProperties).
@Test
public void testRemoveDeclaredProperties() throws Exception {
Configuration conf = new Configuration();
Set<Instruction> irisToSave = conf.getTreePartsToSave();
assertNotNull(irisToSave);
assertEquals(0, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("uo.owl");
Slimmer slimmer = new Slimmer(stream);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(3203, ontology.getAxiomCount());
// Slimming prunes the ontology from 3203 axioms down to 67.
slimmer.removeAllExcept(irisToSave);
ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(67, ontology.getAxiomCount());
}
// Same keep-one-class scenario against the BAO core ontology.
@Test
public void testRemoveMoreDeclaredProperties() throws Exception {
String test = "+:http://www.bioassayontology.org/bao#BAO_0000555";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
Set<Instruction> irisToSave = conf.getTreePartsToSave();
assertNotNull(irisToSave);
assertEquals(1, conf.getTreePartsToSave().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bao_core.owl");
Slimmer slimmer = new Slimmer(stream);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(899, ontology.getAxiomCount());
// Slimming prunes the ontology from 899 axioms down to 46.
slimmer.removeAllExcept(irisToSave);
ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(46, ontology.getAxiomCount());
}
// Removing one specific property drops the 4 axioms that involve it (899 -> 895).
@Test
public void testRemoveSpecificProperty() throws Exception {
String test = "-:http://www.bioassayontology.org/bao#BAO_0000335";
Configuration conf = new Configuration();
conf.read(new StringReader(test));
assertEquals(1, conf.getTreePartsToRemove().size());
InputStream stream = this.getClass().getClassLoader().getResourceAsStream("bao_core.owl");
Slimmer slimmer = new Slimmer(stream);
OWLOntology ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(899, ontology.getAxiomCount());
// Removal drops the axioms referencing the property.
slimmer.removeAll(conf.getTreePartsToRemove());
ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(895, ontology.getAxiomCount());
}
// The saved SLIM file must retain the extra XML namespaces of the source ontology.
@Test
public void testExtraNamespaces() throws Exception {
Configuration conf = new Configuration();
Set<Instruction> irisToSave = conf.getTreePartsToSave();
String ontoFile = "uo.owl";
InputStream stream = this.getClass().getClassLoader().getResourceAsStream(ontoFile);
Slimmer slimmer = new Slimmer(stream);
OWLOntology ontology = slimmer.getOntology();
slimmer.removeAllExcept(irisToSave);
ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(67, ontology.getAxiomCount());
ByteArrayOutputStream output = new ByteArrayOutputStream();
slimmer.saveAs(output, ontoFile);
String owlOutput = output.toString();
System.out.println(owlOutput);
assertTrue(owlOutput.contains("xmlns:ncicp"));
}
// The saved SLIM file must carry the "This SLIM file" version annotation.
@Test
public void testSlimmingVersionAnnotation() throws Exception {
Configuration conf = new Configuration();
Set<Instruction> irisToSave = conf.getTreePartsToSave();
String ontoFile = "uo.owl";
InputStream stream = this.getClass().getClassLoader().getResourceAsStream(ontoFile);
Slimmer slimmer = new Slimmer(stream);
OWLOntology ontology = slimmer.getOntology();
slimmer.removeAllExcept(irisToSave);
ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(67, ontology.getAxiomCount());
ByteArrayOutputStream output = new ByteArrayOutputStream();
slimmer.saveAs(output, ontoFile);
String owlOutput = output.toString();
assertTrue(owlOutput.contains("This SLIM file"));
}
// The saved SLIM file must record its provenance via pav:importedFrom.
@Test
public void testSourceAnnotation() throws Exception {
Configuration conf = new Configuration();
Set<Instruction> irisToSave = conf.getTreePartsToSave();
String ontoFile = "uo.owl";
InputStream stream = this.getClass().getClassLoader().getResourceAsStream(ontoFile);
Slimmer slimmer = new Slimmer(stream);
OWLOntology ontology = slimmer.getOntology();
slimmer.removeAllExcept(irisToSave);
ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(67, ontology.getAxiomCount());
ByteArrayOutputStream output = new ByteArrayOutputStream();
slimmer.saveAs(output, ontoFile);
String owlOutput = output.toString();
assertTrue(owlOutput.contains("pav:importedFrom"));
}
// The saved SLIM file must embed the generation date. The assertion is
// hard-coded to the current year, so it needs a yearly update (see TODO).
@Test
public void testGenerationDate() throws Exception {
Configuration conf = new Configuration();
Set<Instruction> irisToSave = conf.getTreePartsToSave();
String ontoFile = "uo.owl";
InputStream stream = this.getClass().getClassLoader().getResourceAsStream(ontoFile);
Slimmer slimmer = new Slimmer(stream);
OWLOntology ontology = slimmer.getOntology();
slimmer.removeAllExcept(irisToSave);
ontology = slimmer.getOntology();
assertNotNull(ontology);
assertEquals(67, ontology.getAxiomCount());
ByteArrayOutputStream output = new ByteArrayOutputStream();
slimmer.saveAs(output, ontoFile);
String owlOutput = output.toString();
System.out.println("Output: " + owlOutput);
assertTrue(owlOutput.contains(">2022-"), "Did not find the generation data."); // TODO: update every year :)
}
}
|
package org.rcsb.mmtf.decoder;
import org.rcsb.mmtf.api.StructureDataInterface;
import org.rcsb.mmtf.api.StructureAdapterInterface;
/**
* A class of static functions to be used to aid decoding from
* {@link StructureDataInterface} to {@link StructureAdapterInterface}.
* @author Anthony Bradley
*
*/
public class DecoderUtils {
/**
 * Feeds every bio-assembly transformation in the decoded data to the
 * structure inflator, one transform at a time.
 * @param dataApi the interface to the decoded data
 * @param structInflator the interface to put the data into the client object
 */
public static void generateBioAssembly(StructureDataInterface dataApi, StructureAdapterInterface structInflator) {
	for (int assembly = 0; assembly < dataApi.getNumBioassemblies(); assembly++) {
		for (int transform = 0; transform < dataApi.getNumTransInBioassembly(assembly); transform++) {
			// Bio-assembly identifiers handed to the inflator are 1-based.
			structInflator.setBioAssemblyTrans(
					assembly,
					dataApi.getChainIndexListForTransform(assembly, transform),
					dataApi.getMatrixForTransform(assembly, transform),
					Integer.toString(assembly + 1));
		}
	}
}
/**
 * Feeds every inter-group bond to the structure inflator.
 * Bond atom indices are specified within the whole structure and start at 0;
 * the index array holds two atom indices per bond.
 * @param dataApi the interface to the decoded data
 * @param structInflator the interface to put the data into the client object
 */
public static void addInterGroupBonds(StructureDataInterface dataApi, StructureAdapterInterface structInflator) {
	int bond = 0;
	while (bond < dataApi.getInterGroupBondOrders().length) {
		structInflator.setInterGroupBond(
				dataApi.getInterGroupBondIndices()[bond * 2],
				dataApi.getInterGroupBondIndices()[bond * 2 + 1],
				dataApi.getInterGroupBondOrders()[bond]);
		bond++;
	}
}
/**
 * Copies the ancillary header information (R factors, resolution, title,
 * dates and experimental methods) into the structure.
 * @param dataApi the interface to the decoded data
 * @param structInflator the interface to put the data into the client object
 */
public static void addHeaderInfo(StructureDataInterface dataApi, StructureAdapterInterface structInflator) {
	structInflator.setHeaderInfo(
			dataApi.getRfree(),
			dataApi.getRwork(),
			dataApi.getResolution(),
			dataApi.getTitle(),
			dataApi.getDepositionDate(),
			dataApi.getReleaseDate(),
			dataApi.getExperimentalMethods());
}
/**
 * Copies the crystallographic data (space group, unit cell, NCS operators)
 * into the structure, if any is present.
 * @param dataApi the interface to the decoded data
 * @param structInflator the interface to put the data into the client object
 */
public static void addXtalographicInfo(StructureDataInterface dataApi, StructureAdapterInterface structInflator) {
	// A missing unit cell means there is no crystallographic data to transfer.
	if (dataApi.getUnitCell() != null) {
		structInflator.setXtalInfo(dataApi.getSpaceGroup(), dataApi.getUnitCell(), dataApi.getNcsOperatorList());
	}
}
/**
 * Copies each entity's chain indices, sequence, description and type into
 * the structure.
 * @param dataApi the interface to the decoded data
 * @param structInflator the interface to put the data into the client object
 */
public static void addEntityInfo(StructureDataInterface dataApi, StructureAdapterInterface structInflator) {
	for (int entity = 0; entity < dataApi.getNumEntities(); entity++) {
		structInflator.setEntityInfo(
				dataApi.getEntityChainIndexList(entity),
				dataApi.getEntitySequence(entity),
				dataApi.getEntityDescription(entity),
				dataApi.getEntityType(entity));
	}
}
}
|
package liquibase.serializer.core.string;
import liquibase.configuration.GlobalConfiguration;
import liquibase.configuration.LiquibaseConfiguration;
import liquibase.database.Database;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.serializer.LiquibaseSerializable;
import liquibase.serializer.SnapshotSerializer;
import liquibase.snapshot.DatabaseSnapshot;
import liquibase.snapshot.SnapshotControl;
import liquibase.structure.CatalogLevelObject;
import liquibase.structure.DatabaseLevelObject;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.*;
import liquibase.util.StringUtils;
import java.io.*;
import java.util.*;
/**
 * Serializes a {@link DatabaseSnapshot} into a human-readable, indented
 * plain-text report, grouped by catalog/schema and object type.
 */
public class StringSnapshotSerializerReadable implements SnapshotSerializer {
// Number of spaces used for each nesting level of the report.
private static final int INDENT_LENGTH = 4;
@Override
public String[] getValidFileExtensions() {
return new String[]{"txt"};
}
/**
 * Renders the snapshot header (connection URL, product name/version, user,
 * included types) followed by one section per schema (or catalog, for
 * databases without schema support). The {@code pretty} flag is ignored;
 * output is always formatted. Any failure is wrapped in an
 * {@link UnexpectedLiquibaseException}.
 */
@Override
public String serialize(LiquibaseSerializable object, boolean pretty) {
try {
StringBuilder buffer = new StringBuilder();
// This serializer only handles database snapshots; the cast fails otherwise.
DatabaseSnapshot snapshot = ((DatabaseSnapshot) object);
Database database = snapshot.getDatabase();
buffer.append("Database snapshot for ").append(database.getConnection().getURL()).append("\n");
addDivider(buffer);
buffer.append("Database type: ").append(database.getDatabaseProductName()).append("\n");
buffer.append("Database version: ").append(database.getDatabaseProductVersion()).append("\n");
buffer.append("Database user: ").append(database.getConnection().getConnectionUserName()).append("\n");
SnapshotControl snapshotControl = snapshot.getSnapshotControl();
List<Class> includedTypes = sort(snapshotControl.getTypesToInclude());
// List the included object types, one per indented line.
buffer.append("Included types:\n" ).append(StringUtils.indent(StringUtils.join(includedTypes, "\n", new StringUtils.StringUtilsFormatter<Class>() {
@Override
public String toString(Class obj) {
return obj.getName();
}
}))).append("\n");
List<Schema> schemas = sort(snapshot.get(Schema.class), new Comparator<Schema>() {
@Override
public int compare(Schema o1, Schema o2) {
return o1.toString().compareTo(o2.toString());
}
});
for (Schema schema : schemas) {
if (database.supportsSchemas()) {
buffer.append("\nCatalog & Schema: ").append(schema.getCatalogName()).append(" / ").append(schema.getName()).append("\n");
} else {
buffer.append("\nCatalog: ").append(schema.getCatalogName()).append("\n");
}
StringBuilder catalogBuffer = new StringBuilder();
for (Class type : includedTypes) {
// Schemas/catalogs are section headers, and columns are printed as part
// of their owning object, so none of these get a section of their own.
if (type.equals(Schema.class) || type.equals(Catalog.class) || type.equals(Column.class)) {
continue;
}
List<DatabaseObject> objects = new ArrayList<DatabaseObject>(snapshot.get(type));
// Keep only the objects belonging to the current schema (or, for
// schema-less catalog-level objects, the current catalog).
ListIterator<DatabaseObject> iterator = objects.listIterator();
while (iterator.hasNext()) {
DatabaseObject next = iterator.next();
if (next instanceof DatabaseLevelObject) {
continue;
}
Schema objectSchema = next.getSchema();
if (objectSchema == null) {
if (!(next instanceof CatalogLevelObject) || !((CatalogLevelObject) next).getCatalog().equals(schema.getCatalog())) {
iterator.remove();
}
} else if (!objectSchema.equals(schema)) {
iterator.remove();
}
}
outputObjects(objects, type, catalogBuffer);
}
buffer.append(StringUtils.indent(catalogBuffer.toString(), INDENT_LENGTH));
}
return buffer.toString().replace("\r\n", "\n").replace("\r", "\n"); //standardize all newline chars
} catch (Exception e) {
throw new UnexpectedLiquibaseException(e);
}
}
/**
 * Appends one report section for the given object type: the type name
 * followed by each object's name and its serialized attributes, indented.
 * Appends nothing when the list is empty.
 */
protected void outputObjects(List objects, Class type, StringBuilder catalogBuffer) {
List<? extends DatabaseObject> databaseObjects = sort(objects);
if (databaseObjects.size() > 0) {
catalogBuffer.append(type.getName()).append(":\n");
StringBuilder typeBuffer = new StringBuilder();
for (DatabaseObject databaseObject : databaseObjects) {
typeBuffer.append(databaseObject.getName()).append("\n");
typeBuffer.append(StringUtils.indent(serialize(databaseObject, null), INDENT_LENGTH)).append("\n");
}
catalogBuffer.append(StringUtils.indent(typeBuffer.toString(), INDENT_LENGTH)).append("\n");
}
}
/**
 * Serializes one database object's attributes as "name: value" lines.
 * Name/schema/catalog attributes are skipped; nested objects and collections
 * are expanded recursively when {@link #shouldExpandNestedObject} allows it,
 * and a nested reference back to {@code parentObject} is suppressed to avoid
 * infinite recursion. Returns the text without a trailing newline.
 */
private String serialize(final DatabaseObject databaseObject, final DatabaseObject parentObject) {
StringBuilder buffer = new StringBuilder();
final List<String> attributes = sort(databaseObject.getAttributes());
for (String attribute : attributes) {
if (attribute.equals("name")) {
continue;
}
if (attribute.equals("schema")) {
continue;
}
if (attribute.equals("catalog")) {
continue;
}
Object value = databaseObject.getAttribute(attribute, Object.class);
if (value instanceof Schema) {
continue;
}
if (value instanceof DatabaseObject) {
// Skip the back-reference to the object we are being expanded under.
if (parentObject != null && ((DatabaseObject) value).getSnapshotId() != null && ((DatabaseObject) value).getSnapshotId().equals(parentObject.getSnapshotId())) {
continue;
}
boolean expandContainedObjects = shouldExpandNestedObject(value, databaseObject);
if (expandContainedObjects) {
value = ((DatabaseObject) value).getName()+"\n"+StringUtils.indent(serialize((DatabaseObject) value, databaseObject), INDENT_LENGTH);
} else {
value = databaseObject.getSerializableFieldValue(attribute);
}
} else if (value instanceof Collection) {
// Empty collections are omitted entirely (value becomes null below).
if (((Collection) value).size() == 0) {
value = null;
} else {
if (((Collection) value).iterator().next() instanceof DatabaseObject) {
// Sort via TreeSet for stable output, then render each element.
value = StringUtils.join(new TreeSet<DatabaseObject>((Collection<DatabaseObject>) value), "\n", new StringUtils.StringUtilsFormatter() {
@Override
public String toString(Object obj) {
if (obj instanceof DatabaseObject) {
if (shouldExpandNestedObject(obj, databaseObject)) {
return ((DatabaseObject) obj).getName()+"\n"+StringUtils.indent(serialize(((DatabaseObject) obj), databaseObject), INDENT_LENGTH);
} else {
return ((DatabaseObject) obj).getName();
}
} else {
return obj.toString();
}
}
});
value = "\n"+StringUtils.indent((String) value, INDENT_LENGTH);
} else {
value = databaseObject.getSerializableFieldValue(attribute);
}
}
} else {
value = databaseObject.getSerializableFieldValue(attribute);
}
if (value != null) {
buffer.append(attribute).append(": ").append(value).append("\n");
}
}
return buffer.toString().replaceFirst("\n$", "");
}
// Only objects nested inside tables and views are expanded in-line;
// everything else is shown by name only.
protected boolean shouldExpandNestedObject(Object nestedValue, DatabaseObject container) {
return container instanceof Table || container instanceof View;
}
// NOTE(review): the divider string literal below appears truncated in this
// copy of the file (unterminated string) — restore it from upstream before
// compiling; it presumably appends a horizontal divider line to the buffer.
protected void addDivider(StringBuilder buffer) {
buffer.append("
}
// Sorts a mixed collection using natural ordering for Comparables and class
// name ordering for Class objects; anything else is rejected.
private List sort(Collection objects) {
return sort(objects, new Comparator() {
@Override
public int compare(Object o1, Object o2) {
if (o1 instanceof Comparable) {
return ((Comparable) o1).compareTo(o2);
} else if (o1 instanceof Class) {
return ((Class) o1).getName().compareTo(((Class) o2).getName());
} else {
throw new ClassCastException(o1.getClass().getName()+" cannot be cast to java.lang.Comparable or java.lang.Class");
}
}
});
}
// Returns a new list containing the collection's elements sorted with the
// given comparator; the input collection is not modified.
private <T> List<T> sort(Collection objects, Comparator<T> comparator) {
List returnList = new ArrayList(objects);
Collections.sort(returnList, comparator);
return returnList;
}
// Writes the serialized snapshot using the globally configured output encoding.
@Override
public void write(DatabaseSnapshot snapshot, OutputStream out) throws IOException {
out.write(serialize(snapshot, true).getBytes(LiquibaseConfiguration.getInstance().getConfiguration(GlobalConfiguration.class).getOutputEncoding()));
}
@Override
public int getPriority() {
return PRIORITY_DEFAULT;
}
}
|
package com.sri.ai.test.praise.lbp;
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Marker;
import com.sri.ai.expresso.api.Expression;
import com.sri.ai.expresso.core.DefaultSymbol;
import com.sri.ai.expresso.helper.Expressions;
import com.sri.ai.grinder.GrinderConfiguration;
import com.sri.ai.grinder.api.Rewriter;
import com.sri.ai.grinder.api.RewritingProcess;
import com.sri.ai.grinder.library.FunctorConstants;
import com.sri.ai.grinder.library.number.Times;
import com.sri.ai.grinder.library.set.intensional.IntensionalSet;
import com.sri.ai.grinder.library.set.tuple.Tuple;
import com.sri.ai.praise.LPIUtil;
import com.sri.ai.praise.PRAiSEConfiguration;
import com.sri.ai.praise.lbp.LBPConfiguration;
import com.sri.ai.praise.lbp.LBPFactory;
import com.sri.ai.praise.lbp.LBPQueryEngine;
import com.sri.ai.praise.lbp.LBPQueryEngine.QueryStep;
import com.sri.ai.praise.lbp.LBPRewriter;
import com.sri.ai.praise.model.Model;
import com.sri.ai.praise.model.example.IntensionalFanIn;
import com.sri.ai.praise.model.example.TrivialEpidemicAndSickNotbob;
import com.sri.ai.praise.model.example.TrivialEpidemicSickEveryone;
import com.sri.ai.praise.model.example.TrivialEpidemicSickEveryoneNotbobAmaryAjohn;
import com.sri.ai.praise.model.example.TrivialEpidemicSickbob;
import com.sri.ai.praise.model.example.TrivialGaveTreasureToOwnsRich;
import com.sri.ai.praise.model.example.TrivialLoopyFriendsAnnBobAndSmokerBobExample;
import com.sri.ai.praise.model.example.TrivialLoopyMisconceptionExample;
import com.sri.ai.praise.model.example.TrivialLoopyPQ;
import com.sri.ai.praise.model.example.TrivialLoopyPQWithPriors;
import com.sri.ai.praise.model.example.TrivialLoopyPQandb;
import com.sri.ai.praise.model.example.TrivialLoopyParfactorsExample;
import com.sri.ai.praise.model.example.TrivialPQ;
import com.sri.ai.praise.model.example.TrivialPQPeoplea1Anda2;
import com.sri.ai.praise.model.example.TrivialPQR;
import com.sri.ai.praise.model.example.TrivialPQRWithPriors;
import com.sri.ai.praise.model.example.TrivialPQWithPArity2AndQArity1;
import com.sri.ai.praise.model.example.TrivialPQWithPriors;
import com.sri.ai.praise.model.example.TrivialPRWithNonDeterministicFactor;
import com.sri.ai.praise.model.example.TrivialPeopleAmericanTallIntelligentUnintelligent;
import com.sri.ai.praise.model.example.TrivialSickSmokerbob;
import com.sri.ai.praise.model.example.TrivialSickbob;
import com.sri.ai.praise.model.example.TrivialSunnyAvailableCanPlayWith;
import com.sri.ai.praise.model.example.TrivialThereExistsPQWithPriors;
import com.sri.ai.praise.model.example.WeightedPQWithPriors;
import com.sri.ai.praise.model.example.WeightedThereExistsPQWithPriors;
import com.sri.ai.test.praise.AbstractLPITest;
import com.sri.ai.util.Util;
import com.sri.ai.util.base.Pair;
@SuppressWarnings("unused")
public class LBPTest extends AbstractLPITest {
@Test
public void testNewLBPProcess() {
	// Smoke test: a freshly created LBP process can rewrite a basic expression.
	Expression input = parse("1 + 1");
	RewritingProcess lbpProcess = LBPFactory.newLBPProcess(input);
	Expression rewritten = lbpProcess.rewrite(LBPRewriter.R_basic, input);
	System.out.println(rewritten);
}
@SuppressWarnings({ "unchecked" })
@Test
public void testFindRandomVariableValueExpressionsThatAreNotAGivenOne() {
	// Model with a single parfactor relating 'epidemic' and 'sick(X)'.
	Model model = new Model(
			"union({{(on X in People) [if epidemic then if sick(X) then 0.4 else 0.6 else if sick(X) then 0.01 else 0.99]}})",
			"epidemic", "sick"
	);
	Expression expression = parse("if sick(X) and sick(Y) then if Z != X then (if epidemic and sick(Z) then 1 else 0) else (if sick(Z) and sick(W) and sick(X) then 1 else 0) else 0");
	Expression randomVariableValue = parse("sick(X)");
	// Each expected entry pairs a random variable value occurring in the
	// expression with the context condition under which it appears.
	List<Pair<Expression, Expression>> expected = Util.list(
			Util.pair(parse("sick(Y)"), parse("true")),
			Util.pair(parse("epidemic"), parse("Z != X")),
			Util.pair(parse("sick(Z)"), parse("Z != X")),
			Util.pair(parse("sick(W)"), parse("not (Z != X)"))
	);
	RewritingProcess process = LBPFactory.newLBPProcess(expression);
	Model.setRewritingProcessesModel(parse(model.getModelDeclaration()), model.getKnownRandomVariableNames(), process);
	List<Pair<Expression, Expression>> actual =
			LPIUtil.findRandomVariableValueExpressionsThatAreNotNecessarilyTheSameAsAGivenOne(expression, randomVariableValue, process);
	assertEquals(expected, actual);
}
/**
 * Tests {@code LBPRewriter.R_DifferenceOfExtensionalAndIntensionalSet}, which
 * rewrites the difference "extensional set - intensional set". Covers: empty
 * differences, differences with no overlap, conditional results on free
 * variables, standardization apart of the intensional set's index variables,
 * and illegal-argument inputs (flagged via the illegalArgumentTest flag).
 */
@Test
public void testDifferenceOfExtensionalAndIntensionalSet() {
// Test fixture: extA is the extensional set string, intB the intensional set
// string; getTopExpression builds "extA - intB" and callRewrite invokes the
// rewriter under test on the parsed pair.
class DifferenceOfExtensionalAndIntensionalSetTestData extends TestData {
private String extA, intB;
private Expression exprA, exprB;
public DifferenceOfExtensionalAndIntensionalSetTestData(String contextualConstraint, Model model, String extA, String intB, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.extA = extA;
this.intB = intB;
};
@Override
public Expression getTopExpression() {
this.exprA = parse(extA);
this.exprB= parse(intB);
return Expressions.apply("-", exprA, exprB);
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_DifferenceOfExtensionalAndIntensionalSet,
LPIUtil.argForDifferenceOfExtensionalAndIntensionalSetRewriteCall(exprA, exprB, 0));
}
};
TestData[] tests = new TestData[] {
// Basic: Empty Difference
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{}", "{(on X) X | X = a or X = b}",
false,
"{ }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{}", "{{(on X) X | X = a or X = b}}",
false,
"{ }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{}}", "{(on X) X | X = a or X = b}",
false,
"{ }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{}}", "{{(on X) X | X = a or X = b}}",
false,
"{ }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{a}", "{(on X) X | X = a }",
false,
"{ }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{a}", "{{(on X) X | X = a }}",
false,
"{ }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a}}", "{(on X) X | X = a }",
false,
"{ }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a}}", "{{(on X) X | X = a }}",
false,
"{ }"),
// Basic: No Difference
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{a}", "{(on X) X | X != a }",
false,
"{a}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{a}", "{{(on X) X | X != a }}",
false,
"{a}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a}}", "{(on X) X | X != a }",
false,
"{{a}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a}}", "{{(on X) X | X != a }}",
false,
"{{a}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a, a}}", "{(on X) X | X != a }",
false,
"{{a, a}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a, a}}", "{{(on X) X | X != a }}",
false,
"{{a, a}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{a, b}", "{(on X) X | X != a and X != b}",
false,
"{a, b}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{a, b}", "{{(on X) X | X != a and X != b}}",
false,
"{a, b}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a, b}}", "{(on X) X | X != a and X != b}",
false,
"{{a, b}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a, b}}", "{{(on X) X | X != a and X != b}}",
false,
"{{a, b}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a, b, b}}", "{(on X) X | X != a and X != b}",
false,
"{{a, b, b}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{a, b, b}}", "{{(on X) X | X != a and X != b}}",
false,
"{{a, b, b}}"),
// Basic: Conditional (free variable Y makes membership undecidable, so the
// result is an if-then-else on the membership condition)
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{Y, c}", "{(on X) X | X = a or X = b}",
false,
"if Y = a or Y = b then {c} else { Y, c }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{Y, c}", "{{(on X) X | X = a or X = b}}",
false,
"if Y = a or Y = b then {c} else { Y, c }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{Y, c}}", "{(on X) X | X = a or X = b}",
false,
"if Y = a or Y = b then {{c}} else {{Y,c}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{Y, c}}", "{{(on X) X | X = a or X = b}}",
false,
"if Y = a or Y = b then {{c}} else {{Y,c}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{Y, Y}", "{(on X) X | X = a or X = b}",
false,
"if Y = a or Y = b then { } else { Y, Y }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{Y, Y}", "{{(on X) X | X = a or X = b}}",
false,
"if Y = a or Y = b then { } else { Y, Y }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{Y, Y}}", "{(on X) X | X = a or X = b}",
false,
"if Y = a or Y = b then { } else {{ Y, Y }}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{Y, Y}}", "{{(on X) X | X = a or X = b}}",
false,
"if Y = a or Y = b then { } else {{ Y, Y }}"),
// Basic: Conditional - known true then known false
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{b, c}", "{(on X) X | X = b or X != c}",
false,
"{c}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{b, c}", "{{(on X) X | X = b or X != c}}",
false,
"{c}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{b, c}}", "{(on X) X | X = b or X != c}",
false,
"{{c}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{b, c}}", "{{(on X) X | X = b or X != c}}",
false,
"{{c}}"),
// Basic: Standardize Apart (X occurs free in the extensional set and bound in
// the intensional one; the index must be renamed before comparing)
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{X, c}", "{(on X) X | X = a or X = b}",
false,
"if X = a or X = b then {c} else { X, c }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{X, c}", "{{(on X) X | X = a or X = b}}",
false,
"if X = a or X = b then {c} else { X, c }"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{X, c}}", "{(on X) X | X = a or X = b}",
false,
"if X = a or X = b then {{c}} else {{X,c}}"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{X, c}}", "{{(on X) X | X = a or X = b}}",
false,
"if X = a or X = b then {{c}} else {{X,c}}"),
// Illegal arguments: first argument must be extensional, second intensional
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{{(on X) X | X = a or X = b}}", "{{(on Y) Y | Y = c or Y = d}}",
true,
"N/A"),
new DifferenceOfExtensionalAndIntensionalSetTestData(Expressions.TRUE.toString(), null,
"{a, b}", "{a}",
true,
"N/A"),
};
//perform(new TestData[] {
// new DifferenceOfExtensionalAndIntensionalSetTestData("{Y, c}", "{(on X) X | X = a or X = b}", "if Y = a or Y = b then {c} else if Y = c then { c } else { Y, c }"),
perform(tests);
}
/**
 * Tests {@code LBPRewriter.R_DifferenceOfExtensionalAndExtensionalSet}, which
 * rewrites the difference of two extensional sets. Covers: differences with an
 * empty operand, element-order independence, multiset (occurrence-counting)
 * semantics, conditional results when free variables make element equality
 * undecidable, and illegal (non-set) arguments.
 */
@Test
public void testDifferenceOfExtensionalAndExtensionalSet() {
// Test fixture: extA and extB are the two extensional set strings;
// getTopExpression builds "extA - extB" and callRewrite invokes the rewriter
// under test on the parsed pair.
class DifferenceOfExtensionalAndExtensionalSetTestData extends TestData {
private String extA, extB;
private Expression exprA, exprB;
public DifferenceOfExtensionalAndExtensionalSetTestData(String contextualConstraint, Model model, String extA, String extB, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.extA = extA;
this.extB = extB;
};
@Override
public Expression getTopExpression() {
this.exprA = parse(extA);
this.exprB= parse(extB);
return Expressions.apply("-", exprA, exprB);
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_DifferenceOfExtensionalAndExtensionalSet,
LPIUtil.argForDifferenceOfExtensionalAndExtensionalSetRewriteCall(exprA, exprB, 0, 0));
}
};
TestData[] tests = new TestData[] {
// Empty difference
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{}", "{}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a}", "{}",
false,
"{a}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b}", "{}",
false,
"{a, b}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{A}", "{}",
false,
"{A}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{A, B}", "{}",
false,
"{ A, B }"),
// Defined same order difference
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a}", "{b}",
false,
"{a}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b}", "{b}",
false,
"{a}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{b}",
false,
"{a, c}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{b, c}",
false,
"{a}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b}", "{a, b}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{a, b, c}",
false,
"{}"),
// Defined different orderings that should give {}
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b}", "{b, a}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{a, c, b}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{b, a, c}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{b, c, a}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{c, a, b}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{c, b, a}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d}", "{a, b, d, c}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d}", "{a, c, b, d}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d}", "{d, c, b, a}",
false,
"{}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d}", "{c, d, a, b}",
false,
"{}"),
// Multi-Set Differences that should give {{}}
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b}}", "{{a, b}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b}}", "{{b, a}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c}}", "{{a, b, c}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c}}", "{{a, c, b}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c}}", "{{b, a, c}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c}}", "{{b, c, a}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c}}", "{{c, a, b}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c}}", "{{c, b, a}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c, d}}", "{{a, b, d, c}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c, d}}", "{{a, c, b, d}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c, d}}", "{{d, c, b, a}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c, d}}", "{{c, d, a, b}}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a}}", "{a}",
false,
"{ }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b}}", "{a, b}",
false,
"{ }"),
// Multi-Set Differences that should give remaining elements
// (multiset semantics: one occurrence removed per matching occurrence in B)
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a}}", "{{ a }}",
false,
"{{ a }}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a, b, b, c, c, d, d}}", "{{c, d, a, b}}",
false,
"{{ a, b, c, d }}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a, b, b, c, c, d, d}}", "{{b, c, a, d, c, a, b, c}}",
false,
"{{ d }}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a}}", "{ a }",
false,
"{{ a }}"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a, b, b, c, c, d, d}}", "{c, d, a, b}",
false,
"{{ a, b, c, d }}"),
// Basic conditional expected (free variables make equality undecidable)
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a}", "{B}",
false,
"if B = a then { } else { a }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{A}", "{B}",
false,
"if A = B then { } else { A }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{A, B}", "{C}",
false,
// Note: old R_basic result - has unreachable branch:
// "if A = C then if A = B then { } else { B } else if B = C then { A } else if A = B then { A } else { A, B }"
"if A = C then if B = C then { } else { B } else if B = C then { A } else { A, B }"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{A, B, C}", "{D, C}",
false,
// Note: old R_basic result - has unreachable branch:
// "if A = D then if A = B then { } else if B = C then { } else { B } else if A = C then if B = D then { } else if A = B then { } else { B } else if B = D then { A } else if B = C then { A } else if A = B then { A } else { A, B }"
"if A = D then if B = D then { } else if B = C then { } else { B } else if A = C then if B = D then { } else if B = C then { } else { B } else if B = D then { A } else if B = C then { A } else { A, B }"),
// Complex conditional expected
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{A, B, if A = B then C else D}",
"{C}",
false,
// Note: old R_basic result - has unreachable branches:
// "if A = C then if A = B then { } else if A = D then { B } else if B = D then { B } else { B, D } else if B = C then if A = B then { A } else if B = D then { A } else if A = D then { A } else { A, D } else if A = B then { A } else if C = D then { A, B } else if A = D then { A, B } else if B = D then { A, B } else { A, B, D }"
// Note: new R_simplify result also has unreachable branches (to be expected as known to be incomplete).
// Note: introduction of FromConditionalFormulaToFormula shortens the expression further
// Used to be: if A = C then if B = C then if A = B then { } else if D = C then { } else { D } else if A = B then { B } else if D = C then { B } else { B, D } else if B = C then if A = B then { A } else if D = C then { A } else { A, D } else if A = B then { A, B } else if D = C then { A, B } else { A, B, D }
"if A = C then if B = C then { } else if A = B or A != B and D = C then { B } else { B, D } else if B = C then if A = B or A != B and D = C then { A } else { A, D } else if A = B or A != B and D = C then { A, B } else { A, B, D }"),
// Illegal arguments: both arguments must be extensional sets
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a}", "a",
true,
"N/A"),
new DifferenceOfExtensionalAndExtensionalSetTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{a}",
true,
"N/A"),
};
perform(tests);
}
/**
 * Tests {@code LBPRewriter.R_in}, which decides set membership "Alpha in Set".
 * The rewrite yields true, false, or a condition on free variables. Covers:
 * extensional and intensional uni- and multisets, duplicate simplification,
 * standardization apart of index variables, externalization of conditional
 * Alpha/Set arguments, unions of sets, and illegal (non-set) arguments.
 */
@Test
public void testIn() {
// Test fixture: Alpha is the candidate element string, Set the set string;
// getTopExpression builds "Alpha In Set" and callRewrite invokes R_in on the
// parsed pair.
class InTestData extends TestData {
private String Alpha, Set;
private Expression exprAlpha, exprSet;
public InTestData(String contextualConstraint, Model model, String Alpha, String Set, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.Alpha = Alpha;
this.Set = Set;
};
@Override
public Expression getTopExpression() {
this.exprAlpha = parse(Alpha);
this.exprSet = parse(Set);
return Expressions.apply("In", exprAlpha, exprSet);
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_in, LPIUtil.argForInRewriteCall(exprAlpha, exprSet));
}
};
TestData[] tests = new TestData[] {
// Basic: Extensional Uniset In - false
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{}",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{b}",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{b, c}",
false,
"false"),
// Basic: Extensional Multiset In - false
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{}}",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{b}}",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{b, c}}",
false,
"false"),
// Basic: Extensional Uniset In - true
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{a}",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{a, b}",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{b, a, c}",
false,
"true"),
// Basic: Extensional Multiset In - true
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{a}}",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{a, b}}",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{b, a, c}}",
false,
"true"),
// Basic: Extensional Uniset In - condition (A is free, so membership is a
// disjunction of equalities)
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{a}",
false,
"A = a"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{a, b}",
false,
"A = a or A = b"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{b, a, c}",
false,
"A = b or A = a or A = c"),
// Basic: Extensional Multiset In - condition
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{{a}}",
false,
"A = a"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{{a, b}}",
false,
"A = a or A = b"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{{b, a, c}}",
false,
"A = b or A = a or A = c"),
// Ensure duplicates simplified out
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{{b, a, c, b, a, c}}",
false,
"A = b or A = a or A = c"),
// Basic: Intensional Uniset In - false
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{(on X) X | X != a }",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{(on X) X | X = b }",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{(on X) X | X = b or X = c }",
false,
"false"),
// Basic: Intensional Uniset In - true
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{(on X) X | X = a }",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{(on X) X | X = b or X = a }",
false,
"true"),
// Basic: Intensional Multiset In - false
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{(on X) X | X != a }}",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{(on X) X | X = b }}",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{(on X) X | X = b or X = c }}",
false,
"false"),
// Basic: Intensional Multiiset In - true
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{(on X) X | X = a }}",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "{{(on X) X | X = b or X = a }}",
false,
"true"),
// Basic: Intensional Uniset In - condition (the set's condition is instantiated
// on the candidate element)
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{(on X) X}",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{(on X) X | X != b }",
false,
"A != b"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{(on X) X | X != b and X != c }",
false,
"A != b and A != c"),
// Check Standardize Apart (free X vs. bound X must not be confused)
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"X", "{(on X) X | X != b and X != c }",
false,
"X != b and X != c"),
// Basic: Intensional Multiset In - condition
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{{(on X) X}}",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{{(on X) X | X != b }}",
false,
"A != b"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"A", "{{(on X) X | X != b and X != c }}",
false,
"A != b and A != c"),
// Check Standardize Apart
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"X", "{{(on X) X | X != b and X != c }}",
false,
"X != b and X != c"),
// Basic: Externalize conditional - Alpha argument
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"if X = a then a else b", "{a}",
false,
// Note: instead of 'if X = a then true else false' this
// will be further simplified to:
"X = a"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"if X = a then b else a", "{a}",
false,
"X != a"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"if X = a then if Y = b then a else b else if Y = c then b else c", "{a}",
false,
"X = a and Y = b"),
// Basic: Externalize conditional - Set argument
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "if X = a then {a} else {b}",
false,
// Note: instead of 'if X = a then true else false' this
// will be further simplified to:
"X = a"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "if X = a then {b} else {a}",
false,
"X != a"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "if X = a then if Y = b then {a} else {b} else if Y = c then {b} else {c}",
false,
"X = a and Y = b"),
// Basic: Externalize conditional - Set contains conditional argument
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "union(if X = a then {a} else {b}, {c})",
false,
"X = a"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "union(if X = a then {a} else {b}, {a})",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "union(if X = a then {a} else {b}, if Y = a then {a} else {b})",
false,
"X = a or Y = a"),
// Basic: Set is a Union (membership distributes over the union's arguments)
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "union()",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "union({a})",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "union({b})",
false,
"false"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "union({b}, {a})",
false,
"true"),
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "union({b}, {c})",
false,
"false"),
// Illegal argument: second argument must be a set expression
new InTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"a", "1+1",
true,
"N/A"),
};
perform(tests);
}
@Test
public void testSetDifference() {
class SetDifferenceTestData extends TestData {
private String S1, S2;
private Expression exprS1, exprS2;
public SetDifferenceTestData(String contextualConstraint, Model model, String S1, String S2, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.S1 = S1;
this.S2 = S2;
};
@Override
public Expression getTopExpression() {
this.exprS1 = parse(S1);
this.exprS2 = parse(S2);
return Expressions.apply("-", exprS1, exprS2);
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_set_diff,
LPIUtil.argForSetDifferenceRewriteCall(exprS1, exprS2));
}
};
TestData[] tests = new TestData[] {
// Basic: is S1 is the empty set
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{}", "{a, b}",
false,
"{}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{}", "{{a, b}}",
false,
"{}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{}", "{ (on X) X | X != a }",
false,
"{}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{}", "{{ (on X) X | X != a }}",
false,
"{}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union()", "{a, b}",
false,
"{}"),
// Basic: if S2 is the empty set
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b}", "{}",
false,
"{a, b}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b}}", "{}",
false,
"{{a, b}}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{ (on X) X | X != a }", "{}",
false,
"{ (on X) X | X != a }"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{ (on X) X | X != a }}", "{}",
false,
"{{ (on X) X | X != a }}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b}", "union()",
false,
"{a, b}"),
// Basic: if S1 is S11 union S1rest, where S1i and S2 are unisets
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b} union {c, d}", "{b, d}",
false,
"{a} union {c}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union({c, d})", "{b, d}",
false,
"{c} union {}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union()", "{b, d}",
false,
"{}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b} union {c, d} union {e, f} ", "{b, c, e}",
false,
"union({a}, {d} union {f})"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b} union {c, d} union {e, f} union {g, h} ", "{b, c, e, h}",
false,
"union({a}, union({d}, {f} union {g}))"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b} union {c, d} union {e, f} union {g, h} union {i, j} ", "{b, c, e, h, i}",
false,
"union({a}, union({d}, union({f}, {g} union {j})))"),
// Basic: if S2 is S21 union S2rest, where S1 and S2i are unisets
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d}", "{b} union {d}",
false,
"{a, c}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d}", "union({d})",
false,
"{a, b, c}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d}", "union()",
false,
"{a, b, c, d}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d}", "{b} union {d} union {a}",
false,
"{c}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d}", "{b} union {d} union {a} union {c}",
false,
"{}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d, e}", "{b} union {d} union {a} union {c}",
false,
"{e}"),
// Basic: if S1 is S11 union S1rest, where S1i are multisets guaranteed to have unique elements, or a singleton, and S2 is a singleton { b }
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }} union {{(on X) X | X != c }}", "{b}",
false,
"{{ ( on X ) X | X != a and X != b }} union {{ ( on X ) X | X != c }}"),
// R_in is known to be false for the first union argument
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }} union {{(on X) X | X != c }}", "{a}",
false,
"{{ ( on X ) X | X != a }} union {{ ( on X ) X | X != c and X != a }}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }} union {{(on Y) [ if p(Y) then 1 else 0 ] | Y != b }}",
"{ [ if q(Z) then 1 else 0 ] }",
false,
"{{ ( on X ) ([ if p(X) then 1 else 0 ]) | X != a }} union {{ ( on Y ) ([ if p(Y) then 1 else 0 ]) | Y != b }}"),
// A union of a multiset and singleton
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }} union { [ if q(Z) then 1 else 0 ] }",
"{ [ if q(Z) then 1 else 0 ] }",
false,
"{{ ( on X ) ([ if p(X) then 1 else 0 ]) | X != a }}"),
// A singleton multiset
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }} union {{(on X) X | X != c }}", "{{a}}",
false,
"{{ ( on X ) X | X != a }} union {{ ( on X ) X | X != c and X != a }}"),
// Basic: if S1 is {{a1,...,an}} and S2 is { b }
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{}}", "{a}",
false,
"{ }"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c}}", "{b}",
false,
"{{a, c}}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{A, B}}", "{C}",
false,
"if A = C then if B = C then { } else {{ B }} else if B = C then {{ A }} else {{ A, B }}"),
// A singleton multiset
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, b, c}}", "{{b}}",
false,
"{{a, c}}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
" {{ [ if p(X) then 1 else 0 ] }}", "{ [ if q(X) then 1 else 0 ] }",
false,
"{{ ([ if p(X) then 1 else 0 ]) }}"),
// Basic: if S1 is {{ Alpha | C }}_I and S2 is { b }
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }}", "{b}",
false,
"{{(on X) X | X != a and X != b}}"),
// Note: In truth the result should be a multiset of all 'a' elements of the size of the domain of X - 1.
// However, this is not currently considered a problem, as the multisets
// passed to this rewriter are supposed to be guaranteed to have unique elements.
// Keeping this test to highlight the limitation.
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) a | X != a }}", "{a}",
false,
"{ }"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X = a }}", "{a}",
false,
"{ }"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) a | X != a }}", "{b}",
false,
"{{(on X) a | X != a}}"),
// A singleton multiset
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }}", "{{b}}",
false,
"{{(on X) X | X != a and X != b}}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }}", "{ b }",
false,
"{{(on X) [ if p(X) then 1 else 0 ] | X != a and [ if p(X) then 1 else 0 ] != b }}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }}", "{ [ if p(Y) then 1 else 0 ] }",
false,
"{{(on X) [ if p(X) then 1 else 0 ] | X != a and X != Y }}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }}", "{ [ if q(Y) then 1 else 0 ] }",
false,
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }}"),
// Ensure standardize apart works with factors
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }}", "{ [ if p(X) then 1 else 0 ] }",
false,
"{{(on X') [ if p(X') then 1 else 0 ] | X' != a and X' != X }}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }}", "{ [ if q(X) then 1 else 0 ] }",
false,
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }}"),
// Basic: if S1 is a multiset and S2 is {b1,..., bm}
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{ a, b, a }}", "{ b, a }",
false,
"{{ a }}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a, b, b, c, c}}", "{a, b}",
false,
"{{a, b, c, c}}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a, b, b, c, c, d, d}}", "{c, d, a, b}",
false,
"{{a, b, c, d}}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }}", "{b, c}",
false,
"{{ ( on X ) X | X != a and X != b and X != c }}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{ a, b, a }}", "{ b } union { a }",
false,
"{{ a }}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a, b, b, c, c}}", "{ a } union { b }",
false,
"{{a, b, c, c}}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a, b, b, c, c, d, d}}", "{c, d} union {a, b}",
false,
"{{a, b, c, d}}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }}", "{ b } union { c }",
false,
"{{ ( on X ) X | X != a and X != b and X != c }}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }}", "{ b } union {c, d}",
false,
"{{ ( on X ) X | X != a and X != b and X != c and X != d }}"),
// Basic: if S1 is {a1,...,an} and S2 is {b1,...,bm}
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{}", "{}",
false,
"{}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{b, c}",
false,
"{a}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{A, B}", "{C}",
false,
// Note: old R_basic result:
// "if A = C then if A = B then { } else { B } else if B = C then { A } else if A = B then { A } else { A, B }"
"if A = C then if B = C then { } else { B } else if B = C then { A } else { A, B }"),
// Basic: if S1 is { Alpha | C }_I and S2 is {b1,...,bm}
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) X | X != a }", "{a, b}",
false,
"{(on X) X | X != a and X != b}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) a | X != a }", "{a, b}",
false,
"{ }"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) X | X = a or X = b}", "{a, b}",
false,
"{ }"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) a | X != a }", "{b, c}",
false,
"{(on X) a | X != a}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) X | X != a }", "{ }",
false,
"{(on X) X | X != a }"),
// Ensure standardize apart works
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) X | X != a }", "{X}",
false,
"{(on X') X' | X' != a and X' != X}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) a | X != a }", "{X}",
false,
"{(on X') a | X' != a and X != a}"),
// Basic: if S1 is { Alpha | C }_I and S2 is { Alpha' | C' }_I'
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) X | X != a }", "{(on Y) Y | Y != b }",
false,
// Note: old R_formula_simplification result, which in hindsight looks to be wrong.
//"{ ( on X ) X | X != a and | type(Y) - { b } | = 0 }"
// Note: old R_basic result:
"{ (on ) b | true }"),
// Ensure standardize apart works
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) X | X != a }", "{(on X) X | X != b }",
false,
// Note: old R_formula_simplification result, which in hindsight looks to be wrong.
// "{ ( on X ) X | X != a and | type(X) - { b } | = 0 }"
// Note: old R_basic result:
"{ (on ) b | true }"),
// Basic: if S1 is {a1,...,an} and S2 is { Alpha | C }_I
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{(on X) X | X = b }",
false,
"{a, c}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{(on X) X | X != b }",
false,
"{ b }"),
// Basic: if S1 is 'if C then Alpha else Beta'
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"if X = a then {a,b,c} else {d,e,f} ", "{b, e}",
false,
"if X = a then {a, c} else {d, f}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"if X = a then if Y = a then {a,b} else {c, d} else if Z = a then {e, f} else {g, h}", "{b, d, f, h}",
false,
"if X = a then if Y = a then {a} else {c} else if Z = a then {e} else {g}"),
// Basic: if S2 is 'if C then Alpha else Beta'
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d, e, f}", "if Y = g then {c} else {f}",
false,
"if Y = g then {a, b, d, e, f} else {a, b, c, d, e}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c, d, e, f}", "if Y = g then if Z = g then {b} else {c} else if Z = g then {e} else {f}",
false,
"if Y = g then if Z = g then {a, c, d, e, f} else {a, b, d, e, f} else if Z = g then {a, b, c, d, f} else {a, b, c, d, e}"),
// Basic: S1 and S2 are conditionals
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"if X = a then {a,b,c} else {d,e,f}", "if Y = g then {c} else {f}",
false,
"if X = a then if Y = g then {a, b} else {a, b, c} else if Y = g then {d, e, f} else {d, e}"),
// Complex: if S1 is S11 union S1rest
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b} union {c, d}", "{b} union {d}",
false,
"{a} union {c}"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{(on X) X | X = a or X = b } union {(on X) X | X = c or X =d}", "{b, d}",
false,
// Note: old R_basic result:
// "{ a } union { c }"
// TODO - can we do better than this with the new R_simplify logic?
// Doing better now, but conversion from { (on ) alpha | C } to if C then { alpha } else {} breaking BP tests at the moment, so it's disabled
"{ (on ) a | true } union { (on ) c | true }"),
// Fix for failing test: use to remove the A index making it free
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialLoopyMisconceptionExample(),
"{ ( on A, A', B ) ( ([ m(A) ]), ([ if gA(A') and gB(B) then if m(A') then if m(B) then 10 else 1 else (if m(B) then 5 else 30) else 1 ]) ) | A != X and A' != B and (A = A' or A = B) and (A' != A or B != X) }",
"{ ( on B, A, B' ) ( ([ m(B) ]), ([ if gA(A) and gB(B') then if m(A) then if m(B') then 10 else 1 else (if m(B') then 5 else 30) else 1 ]) ) | X != B and A != B' and (B = A or B = B') and (A != X or B' != B) }",
false,
"{ ( on A) ( ([ m(A) ]), ([ if gA(X) and gB(A) then if m(X) then if m(A) then 10 else 1 else (if m(A) then 5 else 30) else 1 ]) ) | X != A }"),
// Multisets are only ever meant to be the first operand S1 (i.e. a multiset of factors).
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{{a}}",
true,
"N/A"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a, b, c}", "{{a, a}}",
true,
"N/A"),
// S1 is a multiset but s2 is also a multiset
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{a, a, b, b, c, c, d, d}}", "{{c, d, a, b}}",
true,
"N/A"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }}", "{{b, c}}",
true,
"N/A"),
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) X | X != a }} union {{(on X) X | X != c }}", "{{b, d}}",
true,
"N/A"),
// S1 is a union of unique multisets with a non singleton
new SetDifferenceTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{{(on X) [ if p(X) then 1 else 0 ] | X != a }} union { [ if q(Z) then 1 else 0 ], [ if p(Z) then 1 else 0 ]}",
"{ [ if q(Z) then 1 else 0 ] }",
true,
"N/A"),
};
perform(tests);
}
// Tests the R_intersection rewriter on pairs of set expressions.
@Test
public void testIntersection() {
    // Local test-case holder: parses the two set expressions and feeds them
    // to the R_intersection rewriter.
    class IntersectionTestData extends TestData {
        private final String firstSet;
        private final String secondSet;
        private Expression parsedFirst;
        private Expression parsedSecond;

        public IntersectionTestData(String contextualConstraint, Model model, String S1, String S2, boolean illegalArgumentTest, String expected) {
            super(contextualConstraint, model, illegalArgumentTest, expected);
            this.firstSet = S1;
            this.secondSet = S2;
        }

        @Override
        public Expression getTopExpression() {
            // The top-level expression under test is "S1 intersection S2".
            parsedFirst = parse(firstSet);
            parsedSecond = parse(secondSet);
            return Expressions.apply("intersection", parsedFirst, parsedSecond);
        }

        @Override
        public Expression callRewrite(RewritingProcess process) {
            return process.rewrite(LBPRewriter.R_intersection,
                    LPIUtil.argForIntersectionRewriteCall(parsedFirst, parsedSecond));
        }
    }

    TestData[] tests = {
        // Basic: Intersection to an empty set
        new IntersectionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
            "{{ (on X) [ p(X) ] | X = a}}", "{{ (on Y) [ p(Y) ] | Y = b }}",
            false,
            "{}"),
        // Basic: Intersection to an intensional set
        new IntersectionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
            "{{ (on X) [ p(X) ] | X != a}}", "{{ (on Y) [ p(Y) ] | Y != a }}",
            false,
            "{{ (on X, Y) [ p(X) ] | X = Y and X != a and Y != a}}"),
        // Basic: standardize apart necessary (both sets use the same index X)
        new IntersectionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
            "{{ (on X) [ p(X) ] | X != a}}", "{{ (on X) [ p(X) ] | X != a }}",
            false,
            "{{ (on X', X) [ p(X') ] | X' = X and X' != a and X != a}}"),
        // Basic: Extensional Set
        new IntersectionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
            "{a, b, c}", "{a}",
            false,
            "{a}"),
        new IntersectionTestData("Y != X and X = ann", new TrivialLoopyFriendsAnnBobAndSmokerBobExample(),
            "{ ([ friends(ann, Y) ]), ([ smoker(Y) ]) }", "{ ([ friends(X, Y) ]), ([ smoker(X) ]), ([ smoker(Y) ]) }",
            false,
            "{ ([ friends(ann, Y) ]), ([ smoker(Y) ]) }"),
        // TODO
        // Basic: Conditional S1
        // TODO
        // Basic: Conditional S2
        // TODO
        // Mixing a multiset with a uni-set is expected to be an illegal argument.
        new IntersectionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
            "{{ (on X) X | X != a}}", "{ (on Y) Y | Y != a }",
            true,
            "N/A")
    };

    perform(tests);
}
// Tests the R_sum rewriter, which sums out the random variables in N from the
// product E * Pi (the summing-out step when computing an LBP message to T).
@Test
public void testSum() {
class SumTestData extends TestData {
// N: set of random variables to sum out; E: the factor expression;
// Pi: product of incoming messages; T: the target random variable;
// beingComputed: messages currently under computation (cycle tracking).
private String N, E, Pi, T, beingComputed;
private Expression exprN, exprE, exprPi, exprT, exprBeingComputed;
// -1 means "do not check how many messages were computed".
private int expectedNumberOfComputedMessages = -1;
private LBPConfiguration configuration = LBPFactory.newLBPConfiguration();
public SumTestData(String contextualConstraint, Model model, String N, String E, String Pi, String T, String beingComputed, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.N = N;
this.E = E;
this.Pi = Pi;
this.T = T;
this.beingComputed = beingComputed;
};
// Variant that additionally asserts how many incoming messages the rewriter
// actually computed (used to verify determinism-based short-circuiting).
public SumTestData(String contextualConstraint, Model model, String N, String E, String Pi, String T, String beingComputed, boolean illegalArgumentTest, String expected, int expectedNumberOfComputedMessages) {
this(contextualConstraint, model, N, E, Pi, T, beingComputed, illegalArgumentTest, expected);
this.expectedNumberOfComputedMessages = expectedNumberOfComputedMessages;
}
@Override
public Expression getTopExpression() {
this.exprN = parse(N);
this.exprE = parse(E);
this.exprPi = parse(Pi);
this.exprT = parse(T);
this.exprBeingComputed = parse(beingComputed);
// Builds the expression: sum( {{ (on N) E * Pi | true }} )
Expression ETimesPi = Times.make(Arrays.asList(exprE, exprPi));
Expression sumMultiset = IntensionalSet.makeMultiSet(exprN, ETimesPi, Expressions.TRUE);
return Expressions.apply(FunctorConstants.SUM, sumMultiset);
}
@Override
public Expression callRewrite(RewritingProcess process) {
// Reset the counter so each test case counts only its own computed messages.
configuration.setSumRewriterTestMessageCounter(0);
RewritingProcess lbpProcess = LBPFactory.newBoundLBPProcess(process.getRootExpression(), configuration, process);
return lbpProcess.rewrite(LBPRewriter.R_sum, LPIUtil.argForSumRewriteCall(exprN, exprE, exprPi, exprT, exprBeingComputed));
}
@Override
public String perform(int i) {
// First check the rewritten result as usual; then, if requested, also check
// the number of messages that were actually computed during the rewrite.
String errorMessage = super.perform(i);
if (errorMessage != null) {
return errorMessage;
}
if (expectedNumberOfComputedMessages != -1) {
if (expectedNumberOfComputedMessages != configuration.getSumRewriterTestMessageCounter()) {
return "Expected " + expectedNumberOfComputedMessages + " messages to be computed but " + configuration.getSumRewriterTestMessageCounter() + " were actually computed.";
}
}
return null;
}
};
TestData[] tests = new TestData[] {
// Basic: Straight forward cases
// Use a logical variable name in the model
new SumTestData(Expressions.TRUE.toString(), new TrivialPQWithPriors(),
"{ [ q(X) ] }",
"if p(X) and q(X) then 1 else 0",
"product({{ ( on V' in { ([ q(X) ]) } ) message to [ if p(X) and q(X) then 1 else 0 ] from V' }})",
"[ p(X) ]",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if p(X) then 1 else 0",
0 /* given determinism, there is no need to compute the incoming message */),
// Use a logical variable name not in the model
new SumTestData(Expressions.TRUE.toString(), new TrivialPQWithPriors(),
"{ [ q(W) ] }",
"if p(W) and q(W) then 1 else 0",
"product({{ ( on V' in { ([ q(W) ]) } ) message to [ if p(W) and q(W) then 1 else 0 ] from V' }})",
"[ p(W) ]",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if p(W) then 1 else 0",
0),
new SumTestData(Expressions.TRUE.toString(), new WeightedPQWithPriors(),
"{ [ q(X) ] }",
"if p(X) and q(X) then 0.6 else 0.4",
"product({{ ( on V' in { ([ q(X) ]) } ) message to [ if p(X) and q(X) then 0.6 else 0.4 ] from V' }})",
"[ p(X) ]",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if p(X) then 0.46 else 0.4",
1 /* lack of determinism makes computing the message necessary */),
new SumTestData(Expressions.TRUE.toString(), new TrivialPQRWithPriors(),
"{ [ q(X) ], [ r(X) ] }",
"if p(X) and q(X) and r(X) then 0.6 else 0.4",
"product({{ ( on V' in { [ q(X) ], [ r(X) ] } ) message to [ if p(X) and q(X) and r(X) then 0.6 else 0.4 ] from V' }})",
"[ p(X) ]",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"0.40",
1 /* determinism on q makes it unnecessary to compute message on r*/),
// Basic: Loopy Models
/*TODO - Loopy model, currently not supported.
new SumTestData(new TrivialLoopyPQWithPriors(),
"{ [ q(Y) ] }",
"if p(X) and q(Y) then 1 else 0",
"product({{ ( on V' in { ([ q(Y) ]) } ) 'message to . from .'([ if p(X) and q(Y) then 2 else 1 ], V') }})",
false,
"if p(X) then 0.3 else 0"),
*/
};
perform(tests);
}
// Tests the R_m_to_f_from_v rewriter, which computes the message sent from a
// random variable node to a factor node.
@Test
public void testMessageToFactorFromVariable() {
    // Local test-case holder: parses the "message to F from V" expression and
    // the being-computed set, then invokes the rewriter on them.
    class MsgToFFromVTestData extends TestData {
        private final String messageExpression;
        private final String beingComputedExpression;
        private Expression parsedMessage;
        private Expression parsedBeingComputed;

        public MsgToFFromVTestData(String contextualConstraint, Model model, String msgToF_V, String beingComputed, boolean illegalArgumentTest, String expected) {
            super(contextualConstraint, model, illegalArgumentTest, expected);
            this.messageExpression = msgToF_V;
            this.beingComputedExpression = beingComputed;
        }

        @Override
        public Expression getTopExpression() {
            parsedMessage = parse(messageExpression);
            parsedBeingComputed = parse(beingComputedExpression);
            return parsedMessage;
        }

        @Override
        public Expression callRewrite(RewritingProcess process) {
            return process.rewrite(LBPRewriter.R_m_to_f_from_v,
                    LPIUtil.argForMessageToFactorFromVariableRewriteCall(parsedMessage, parsedBeingComputed));
        }
    }

    TestData[] tests = {
        // Basic: Straight forward cases
        new MsgToFFromVTestData(Expressions.TRUE.toString(),
            new Model(
                "union( {{ (on X) [if p(X) then 0.2 else 0.3] }}, {{ [if p(a) then 1 else 0] }} )",
                "p"
            ),
            "message to [if p(a) then 1 else 0] from [p(a)]",
            LPIUtil.createNewBeingComputedExpression().toString(),
            false,
            "if p(a) then 0.2 else 0.3"
        ),
        new MsgToFFromVTestData(Expressions.TRUE.toString(),
            new Model(
                "union( {{ [if p(X) then 0.2 else 0.3] }}, {{ [if p(a) then 1 else 0] }} )",
                "p"
            ),
            "message to [if p(a) then 1 else 0] from [p(a)]",
            LPIUtil.createNewBeingComputedExpression().toString(),
            false,
            // Note: old R_basic result
            // "if X = a then if p(a) then 0.2 else 0.3 else 1"
            // Note: no constraint applier used in R_simplify so p(X) instead of p(a).
            "if X = a then if p(a) then 0.20 else 0.30 else 1"
        ),
        // From ALBPTest.testMessageToFactorFromVariable()
        new MsgToFFromVTestData(Expressions.TRUE.toString(),
            new TrivialPQWithPriors(),
            "message to [if p(X) and q(X) then 1 else 0] from [p(X)]",
            LPIUtil.createNewBeingComputedExpression().toString(),
            false,
            "if p(X) then 0.2 else 0.8"),
        // A loopy model is expected to be an illegal argument here.
        new MsgToFFromVTestData(Expressions.TRUE.toString(),
            new TrivialLoopyPQWithPriors(),
            "message to [if p(X) and q(Y) then 2 else 1] from [if p(X) and q(Y) then 1 else 0]",
            LPIUtil.createNewBeingComputedExpression().toString(),
            true,
            "N/A"),
    };

    perform(tests);
}
// Tests the R_neigh_v rewriter, which computes Neigh([v]): the multiset of
// parfactors in the model that are neighbors of random variable v.
@Test
public void testNeighboursRandomVariable() {
class NRVTestData extends TestData {
// String form of the Neigh([v]) expression under test.
private String neighV;
private Expression exprNeighV;
public NRVTestData(String contextualConstraint, Model model, String neighV, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.neighV = neighV;
};
@Override
public Expression getTopExpression() {
this.exprNeighV = parse(neighV);
return this.exprNeighV;
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_neigh_v, exprNeighV);
}
};
TestData[] tests = new TestData[] {
// Basic: Simple tests
new NRVTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"Neigh([p(z)])",
false,
"{{ ( on X, Y ) ([ if p(X) or p(Y) then 1 else 0 ]) | X = z or Y = z }}"),
new NRVTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"Neigh([p(b)])",
false,
"{{ ( on X, Y ) ([if p(b) and q(X, Y) and r then 1 else 0]) | X != a }} union {{ ( on X, Y ) ([if p(X) or p(Y) then 1 else 0]) | X = b or Y = b }}"),
new NRVTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"Neigh([p(Z)])",
false,
"{{ ( on X, Y ) ([if p(b) and q(X, Y) and r then 1 else 0]) | X != a and Z = b }} union {{ ( on X, Y ) ([if p(X) or p(Y) then 1 else 0]) | Z = X or Z = Y }}"),
new NRVTestData(Expressions.TRUE.toString(), new TrivialLoopyPQWithPriors(),
"Neigh([p(Z)])",
false,
"{{ ( on Y ) ([ if p(Z) and q(Y) then 2 else 1 ]) }} union {{ ([ if p(Z) then 0.2 else 0.8 ]) }}"),
new NRVTestData(Expressions.TRUE.toString(), new TrivialLoopyPQWithPriors(),
"Neigh([q(Z)])",
false,
"{{ ( on X ) ([ if p(X) and q(Z) then 2 else 1 ]) }} union {{ ([ if q(Z) then 0.3 else 0.7 ]) }}"),
new NRVTestData(Expressions.TRUE.toString(), new IntensionalFanIn(),
"Neigh([p])",
false,
"{{ ( on X in People ) ([if q(X) then if p then 1 else 0 else if p then 0 else 1]) }}"),
// NOTE(review): exact duplicate of the preceding test case — possibly intended to
// exercise repeated computation (e.g. caching) of Neigh; confirm the intent.
new NRVTestData(Expressions.TRUE.toString(), new IntensionalFanIn(),
"Neigh([p])",
false,
"{{ ( on X in People ) ([if q(X) then if p then 1 else 0 else if p then 0 else 1]) }}"),
new NRVTestData(Expressions.TRUE.toString(), new IntensionalFanIn(),
"Neigh([q(a)])",
false,
"{{ ([ if q(a) then if p then 1 else 0 else if p then 0 else 1 ]) }}"),
new NRVTestData(Expressions.TRUE.toString(), new IntensionalFanIn(),
"Neigh([q(a1)])",
false,
"{{ ([ if q(a1) then if p then 1 else 0 else if p then 0 else 1 ]), ([ if q(a1) then 1 else 0 ]) }}"),
new NRVTestData(Expressions.TRUE.toString(), new IntensionalFanIn(),
"Neigh([q(Z)])",
false,
// Note: old R_basic result:
// "if Z = a1 then {{ ([ if q(a1) then if p then 1 else 0 else if p then 0 else 1 ]), ([ if q(a1) then 1 else 0 ]) }} else if Z = a2 then {{ ([ if q(a2) then if p then 1 else 0 else if p then 0 else 1 ]), ([ if q(a2) then 1 else 0 ]) }} else {{ ([ if q(Z) then if p then 1 else 0 else if p then 0 else 1 ]) }}"
// Note: no constraint applier used anymore so q(Z) instead of q(a1)
"if Z = a1 then {{ ([ if q(a1) then if p then 1 else 0 else if p then 0 else 1 ]), ([ if q(a1) then 1 else 0 ]) }} else if Z = a2 then {{ ([ if q(a2) then if p then 1 else 0 else if p then 0 else 1 ]), ([ if q(a2) then 1 else 0 ]) }} else {{ ([ if q(Z) then if p then 1 else 0 else if p then 0 else 1 ]) }}"),
// Not a Random Variable in the model
new NRVTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"Neigh([m(a)])",
true,
"N/A"),
};
perform(tests);
}
// Tests the R_neigh_v_parf rewriter, which computes the neighbors of a random
// variable Ev restricted to a single parfactor PF (the sub-multiset of PF whose
// factors mention Ev).
@Test
public void testNeighboursOfRandomVariableInParfactor() {
class NRVIPFTestData extends TestData {
// Ev: the random variable (in "[ ... ]" brackets); PF: the parfactor set expression.
private String Ev, PF;
private Expression exprEv, exprPF;
public NRVIPFTestData(String contextualConstraint, Model model, String Ev, String PF, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.Ev = Ev;
this.PF = PF;
};
@Override
public Expression getTopExpression() {
this.exprEv = parse(Ev);
this.exprPF = parse(PF);
// The top-level expression under test is "Ev in PF".
return Expressions.make("in", exprEv, exprPF);
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_neigh_v_parf,
LPIUtil.argForNeighborsOfRandomVariableInParfactorRewriteCall(exprEv, exprPF));
}
};
TestData[] tests = new TestData[] {
// Basic: Trivial argument cases
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(X)]", "{ }",
false,
"{ }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(X)]", "{{ }}",
false,
"{ }"),
// Basic: Extensional Parfactor cases
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a1)]", "{ [if p(a1) then 1 else 0] }",
false,
"{ [if p(a1) then 1 else 0] }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a1)]", "{{ [if p(a1) then 1 else 0] }}",
false,
"{{ [if p(a1) then 1 else 0] }}"),
// Note: Currently, when normalized, this will create a comparison of the parfactors in a conditional, which is
// not supported. This is considered an edge case that should not occur, as the model is usually intensional.
//new NRVIPFTestData(new TrivialPQR(), "[p(a1)]", "union({ [if p(a1) then 1 else 0]}, {[if p(a1) and q(a1) then 1 else 0] })", false, "union([if p(a1) then 1 else 0], [if p(a1) and q(a1) then 1 else 0])"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a1)]", "{{ [if p(a1) then 1 else 0], [if p(a1) and q(a1) then 1 else 0] }}",
false,
"{{ ([ if p(a1) then 1 else 0 ]), ([ if p(a1) and q(a1) then 1 else 0 ]) }}"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(X)]", "{ [if p(a1) then 1 else 0] }",
false,
"if X = a1 then {[if p(a1) then 1 else 0]} else { }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(X)]", "{{ [if p(a1) then 1 else 0] }}",
false,
"if X = a1 then {{[if p(a1) then 1 else 0]}} else { }"),
// Basic: Intensional Parfactor cases
// No change expected
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{ (on X in {a,b,c}) [if p(a) then 1 else 0] | X != d }",
false,
"{ (on X in {a,b,c}) [if p(a) then 1 else 0] | X != d }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{{ (on X in {a,b,c}) [if p(a) then 1 else 0] | X != d }}",
false,
"{{ (on X in {a,b,c}) [if p(a) then 1 else 0] | X != d }}"),
// and addition expected
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{ (on X in {a,b,c}) [if p(X) then 1 else 0] | X != d }",
false,
"{ ([if p(a) then 1 else 0]) }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{{ (on X in {a,b,c}) [if p(X) then 1 else 0] | X != d }}",
false,
"{{ ([if p(a) then 1 else 0]) }}"),
// and addition or expected
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{ (on X in {a,b,c}, Y in {d, e, f}) [if p(X) and p(Y) then 1 else 0] | X != d }",
false,
// Note: old R_formula_simplification result
// "{ (on X in {a,b,c}, Y in {d,e,f}) [if p(X) and p(Y) then 1 else 0] | X = a or (X != d and Y = a) }"
"{ (on X in {a,b,c}, Y in {d,e,f}) [if p(X) and p(Y) then 1 else 0] |X != d and (X = a or Y = a) }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{{ (on X in {a,b,c}, Y in {d, e, f}) [if p(X) and p(Y) then 1 else 0] | X != d }}",
false,
// Note: old R_formula_simplification result
// "{{ (on X in {a,b,c}, Y in {d,e,f}) [if p(X) and p(Y) then 1 else 0] | X = a or (X != d and Y = a) }}"
"{{ (on X in {a,b,c}, Y in {d,e,f}) [if p(X) and p(Y) then 1 else 0] | X != d and (X = a or Y = a) }}"),
// false C
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{ (on X in {a,b,c}) [if p(a) then 1 else 0] | false }",
false,
"{ }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{{ (on X in {a,b,c}) [if p(a) then 1 else 0] | false }}",
false,
"{ }"),
// I is empty
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{ [if p(a) then 1 else 0] | true }",
false,
"{ [if p(a) then 1 else 0] }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{{ [if p(a) then 1 else 0] | true }}",
false,
"{{ [if p(a) then 1 else 0] }}"),
// if C is (C' and i = Beta) for i an index in I
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{ (on X) [if p(X) then 1 else 0] | Z = X }",
false,
// Note: old R_formula_simplification result
// "{ ( on X ) ([ if p(X) then 1 else 0 ]) | X = Z = a }"
"if Z = a then { ([ if p(a) then 1 else 0 ]) } else { }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{{ (on X) [if p(X) then 1 else 0] | Z = X }}",
false,
// Note: old R_formula_simplification result
// "{{ ( on X ) ([ if p(X) then 1 else 0 ]) | X = Z = a }}"
"if Z = a then {{ ([ if p(a) then 1 else 0 ]) }} else { }"),
// Standardize Apart
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(X)]", "{ (on X in {a,b,c}) [if p(X) then 1 else 0] | X != d }",
false,
"if X != d then { ([ if p(X) then 1 else 0 ]) } else { }"),
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(X)]", "{{ (on X in {a,b,c}) [if p(X) then 1 else 0] | X != d }}",
false,
"if X != d then {{ ([ if p(X) then 1 else 0 ]) }} else { }"),
// Not a Random Variable in the model
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[X]", "{ }",
true,
"N/A"),
// Not a Parfactor
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(X)]", "+(1, 2)",
true,
"N/A"),
// Head clause of Intensional set is not a factor
new NRVIPFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(a)]", "{{ (on X) if p(X) then 1 else 0 | Z = X }}",
true,
"N/A"),
};
perform(tests);
}
// Tests the R_union rewriter, which normalizes union expressions: drops empty
// sets, flattens nested unions, and externalizes conditionals out of operands.
@Test
public void testUnion() {
class UnionTestData extends TestData {
// String form of the union expression under test.
private String U;
private Expression exprU;
public UnionTestData(String contextualConstraint, Model model, String U, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.U = U;
}
@Override
public Expression getTopExpression() {
this.exprU = parse(U);
return exprU;
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_union, exprU);
}
};
TestData[] tests = new TestData[] {
// Basic: Simple argument cases
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union()",
false,
"{}"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union({})",
false,
"{}"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union({a})",
false,
"{a}"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union({a,b,c})",
false,
"{a,b,c}"),
// Basic: Remove empty set cases
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{} union {a}",
false,
"{a}"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a} union {}",
false,
"{a}"),
// Basic: Externalize Conditionals
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"if X = Y then {a} union {} else {} union {b}",
false,
"if X = Y then {a} else {b}"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union(if X = Y then {a} union {} else {} union {b})",
false,
"if X = Y then {a} else {b}"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union(if X = Y then {a} union {} else {} union {b}, {c})",
false,
"if X = Y then { a, c } else { b, c }"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union({c}, if X = Y then {a} union {} else {} union {b})",
false,
"if X = Y then { c, a } else { c, b }"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union(if X = Y then {a} union {} else {} union {b}, if R = Z then {c} union {} else {} union {d})",
false,
"if X = Y then if R = Z then { a, c } else { a, d } else if R = Z then { b, c } else { b, d }"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union(if X = Y then {a} union (if R = Z then {d} else {e}) else (if M = N then {f} else {g}) union {b}, {c})",
false,
"if X = Y then if R = Z then { a, c, d } else { a, c, e } else if M = N then { f, b, c } else { g, b, c }"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union({c}, if X = Y then {a} union (if R = Z then {d} else {e}) else (if M = N then {f} else {g}) union {b})",
false,
"if X = Y then if R = Z then { c, a, d } else { c, a, e } else if M = N then { c, f, b } else { c, g, b }"),
// Deeply nested conditional operands: all four condition pairs get externalized.
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"(if X = Y then { a } union (if R = Z then { d } else { e }) else (if M = N then { f } else { g }) union { b }) union (if K = L then {c} union (if J = P then {j} else {k}) else (if G = H then {m} else {n}) union {h})",
false,
"if X = Y then if R = Z then if K = L then if J = P then { a, d, c, j } else { a, d, c, k } else if G = H then { a, d, m, h } else { a, d, n, h } else if K = L then if J = P then { a, e, c, j } else { a, e, c, k } else if G = H then { a, e, m, h } else { a, e, n, h } else if M = N then if K = L then if J = P then { f, b, c, j } else { f, b, c, k } else if G = H then { f, b, m, h } else { f, b, n, h } else if K = L then if J = P then { g, b, c, j } else { g, b, c, k } else if G = H then { g, b, m, h } else { g, b, n, h }"),
// Basic: Test unflattened unions, that the code can handle
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union(union({a}, {b}), {c})",
false,
"{a, b, c}"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"union({a}, union(union({b}, {c})))",
false,
"{a, b, c}"),
// Basic: Test R_basic on Random Variables call.
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"{{ ( on X, Y ) ([ if p(b) and q(X, Y) and r then 1 else 0 ]) | X != a }} union {{ ( on X, Y ) ([ if p(X) or p(Y) then 1 else 0 ]) | [ p(b) ] = [ p(X) ] or [ p(b) ] = [ p(Y) ] }}",
false,
"{{ ( on X, Y ) ([ if p(b) and q(X, Y) and r then 1 else 0 ]) | X != a }} union {{ ( on X, Y ) ([ if p(X) or p(Y) then 1 else 0 ]) | X = b or Y = b }}"),
// Illegal arguments: operands that are not sets.
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"1",
true,
"N/A"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a} union 1",
true,
"N/A"),
new UnionTestData(Expressions.TRUE.toString(), new TrivialPQ(),
"{a} union {b} union 1",
true,
"N/A"),
};
perform(tests);
}
// Exercises the R_prod_factor rewriter: computing the product of messages from a
// (possibly intensionally defined) set of factors to a random variable.
@Test
public void testProductFactor() {
// Local fixture: holds a product expression (Pi) and a "being computed" tuple,
// parses them lazily in getTopExpression(), and invokes R_prod_factor.
class PFTestData extends TestData {
// Raw (unparsed) product expression and being-computed expression.
private String Pi, beingComputed;
private Expression exprPi, exprBeingComputed;
// Optional global objects (e.g. sort-size overrides) installed on the process before rewriting.
private Map<Object, Object> globalObjects;
public PFTestData(String contextualConstraint, Model model, String Pi, String beingComputed, boolean illegalArgumentTest, String expected) {
this(contextualConstraint, model, Pi, beingComputed, null, illegalArgumentTest, expected);
};
public PFTestData(String contextualConstraint, Model model, String Pi, String beingComputed, Map<Object, Object> globalObjects, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.Pi = Pi;
this.beingComputed = beingComputed;
this.globalObjects = globalObjects;
};
@Override
public Expression getTopExpression() {
// Parse both inputs here so each test run starts from fresh expressions.
this.exprPi = parse(Pi);
this.exprBeingComputed = parse(beingComputed);
return this.exprPi;
}
@Override
public Expression callRewrite(RewritingProcess process) {
if (globalObjects != null) {
process.getGlobalObjects().putAll(globalObjects);
}
return process.rewrite(LBPRewriter.R_prod_factor,
LPIUtil.argForProductFactorRewriteCall(exprPi, exprBeingComputed));
}
};
TestData[] tests = new TestData[] {
// Basic: Straight forward cases
// Empty factor set: the product over no messages is 1.
new PFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"product({{ (on F in { }) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"1"),
new PFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"product({{ (on F in { [if r and (p(X) or not p(X)) then 0.2 else 0.3] } ) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if r then 0.4 else 0.6"), // this is ((if r then 0.2 else 0.3) + (if r then 0.2 else 0.3))
// the summation comes from summing p(X) out (a boolean variable).
new PFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"product({{ (on F in {{ (on X) [if r and (p(X) or not p(X)) then 0.2 else 0.3] | X = a or X = b }} ) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if r then 0.16 else 0.36"), // this is ((if r then 0.2 else 0.3) + (if r then 0.2 else 0.3)) ^ 2
// the summation comes from summing p(X) out (a boolean variable)
// and the exponentiation comes from the two instances of the factor, one for X = a and the other for X = b.
// The point of the irrelevant p(X) in the factor above is so that the factor is really on r alone, so the message is easily predictable.
// p(X) is included so that we have more than one instance of the factor (two, to be exact)
// so we can test the exponentiation of the potentials.
new PFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"product({{ (on F in {{ [if r and (p(a) or not p(a)) then 0.2 else 0.3] }} union {{ [if r and (p(b) or not p(b)) then 0.2 else 0.3] }} ) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if r then 0.16 else 0.36"),
new PFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"product({{ (on F in {{ [if r and (p(X) or not p(X)) then 0.2 else 0.3] }} union {{ [if r and (p(Y) or not p(Y)) then 0.2 else 0.3] }} ) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if r then 0.16 else 0.36"),
// Ensure union with a singleton uniset allowed.
new PFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"product({{ (on F in {{ [if r and (p(X) or not p(X)) then 0.2 else 0.3] }} union { [if r and (p(Y) or not p(Y)) then 0.2 else 0.3] } ) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if r then 0.16 else 0.36"),
// Ensure union with a singleton uniset and emptyset allowed.
new PFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"product({{ (on F in {{ [if r and (p(X) or not p(X)) then 0.2 else 0.3] }} union { [if r and (p(Y) or not p(Y)) then 0.2 else 0.3] } union {} ) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if r then 0.16 else 0.36"),
new PFTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"product({{ (on F in { [if p(b) and q(a,b) and r then 1 else 0] }) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if r then 1 else 0"),
// Conditional factor set: the result is conditional on the same condition.
new PFTestData(Expressions.TRUE.toString(),
new TrivialPRWithNonDeterministicFactor(),
"product({{ (on F in (if X = Y then { [if r and (p(X) or not p(X)) then 0.2 else 0.3] } else { }) ) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if X = Y then if r then 0.4 else 0.6 else 1"),
new PFTestData(Expressions.TRUE.toString(),
new Model(
"union(" +
"{{ (on X,Y) [if p(X) and q(X,Y) then 2 else 3] }}," +
"{{ (on Y) [if q(a,Y) then 10 else 20] }}" +
")",
"p", "q"
),
"product({{ (on F in {{ (on Y) [if p(Z) and q(Z,Y) then 2 else 3] }} ) (message to [p(Z)] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
// Note: Universe is the default sort name associated with a model that doesn't specify any.
Util.map(parse("| Universe |"), parse("2")),
false,
// Note: old R_basic result:
// "if Z = a then if p(a) then 6400 else 8100 else if p(Z) then 25 else 36"
// Note: no constraint applier used in R_simplify so p(Z) instead of p(a).
"if Z = a then if p(a) then 6400 else 8100 else if p(Z) then 25 else 36"
// This tests conditionals inside the message that do not depend on the indices of the product.
// for Z = a, message on q(a,Y) is if q(a,y) then 10 else 20.
// Then we sum over q and get 2*10 + 3*20 for true p and 3*10 + 3*20 for false p
// That gives us 80 and 90, which are then squared to 6400 and 8100.
// For Z != a, we get message 1 on q.
// Then we sum over q and get 2*1 + 3*1 for true p and 3*1 + 3*1 for false p
// That gives us 5 and 6, which are then squared to 25 and 36.
),
new PFTestData(Expressions.TRUE.toString(),
new Model(
"union(" +
"{{ (on X) [if p and q(X) then 2 else 3] }}," +
"{{ [if q(a) then 10 else 20] }}" +
")",
"p", "q"
),
"product({{ (on F in {{ (on X) [if p and q(X) then 2 else 3] }} ) (message to [p] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
// Note: here we override based on 'type(X)' directly instead of using 'Universe'
Util.map(parse("|type(X)|"), parse("3")),
false,
"if p then 2000 else 3240"
// This tests conditionals inside the message that *do* depend on the indices of the product.
// Message from [if p and q(X) then 2 else 3] to [p] will be:
// if X = a then if p then 80 else 90 else if p then 5 else 6
// (this comes from 10*2 + 20*3 and 10*3 + 20*3 and 2 + 3 and 3 + 3)
// Then product will be split into
// (if p then 80 else 90)^|X=a| * (if p then 5 else 6)^|X != a|
// (if p then 80 else 90) * (if p then 5 else 6)^2
// (if p then 80 else 90) * (if p then 25 else 36)
// if p then 2000 else 3240
),
// Illegal-argument case: union of non-empty multisets (not unisets) is rejected.
new PFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"product({{ (on F in { [if r and (p(X) or not p(X)) then 0.2 else 0.3] } union { [if r and (p(Y) or not p(Y)) then 0.2 else 0.3], [if r and (p(Y) or not p(Y)) then 0.2 else 0.3] } ) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
true, // not defined on union of non empty or unisets.
"N/A"),
};
perform(tests);
}
// Exercises the R_m_to_v_from_f rewriter: the message sent from a factor to a
// random variable, obtained by summing out the factor's other neighbors.
@Test
public void testMessageToVariableFromFactor() {
// Local fixture: parses the "message to [V] from [F]" expression and the
// being-computed tuple, then invokes R_m_to_v_from_f.
class MsgToVFromFTestData extends TestData {
private String msgToV_F, beingComputed;
private Expression exprMsgToV_F, exprBeingComputed;
public MsgToVFromFTestData(String contextualConstraint, Model model, String msgToV_F, String beingComputed, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.msgToV_F = msgToV_F;
this.beingComputed = beingComputed;
};
@Override
public Expression getTopExpression() {
this.exprMsgToV_F = parse(msgToV_F);
this.exprBeingComputed = parse(beingComputed);
return this.exprMsgToV_F;
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_m_to_v_from_f,
LPIUtil.argForMessageToVariableFromFactorRewriteCall(this.exprMsgToV_F, this.exprBeingComputed));
}
};
TestData[] tests = new TestData[] {
// Basic: Straight forward cases
// From ALBPTest.testMessageToVariableFromFactorSingleStep()
new MsgToVFromFTestData(Expressions.TRUE.toString(),
new Model(
"union(" +
"{{(on X) [if p(X) and q(X) then 0.2 else 0.3]}}" + ")",
"p", "q"
),
"message to [p(X)] from [if p(X) and q(X) then 0.2 else 0.3]",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if p(X) then 0.5 else 0.6"),
// From ALBPTest.testBelief()
new MsgToVFromFTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"message to [intelligent(X)] from [if intelligent(X) <=> not unintelligent(X) then 1 else 0]",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"1"),
new MsgToVFromFTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"message to [tall(X)] from [if tall(X) then 2 else 8]",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if tall(X) then 2 else 8"),
new MsgToVFromFTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"message to [tall(X)] from [if tall(X) and american(X) then 7 else 1]",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if tall(X) then 8 else 2"),
// Illegal-argument case: destination is not a random variable in the model.
new MsgToVFromFTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"message to [if p(X) then 0.6 else 0.4] from [if p(X) then 0.6 else 0.4]",
LPIUtil.createNewBeingComputedExpression().toString(),
true,
"N/A"
),
};
perform(tests);
}
// Exercises LPIUtil.randomVariableIsReferencedByExpression(): whether a random
// variable V occurs in expression E, returning true/false or a condition on
// free variables when occurrence is undetermined.
@Test
public void testRandomVariableIsReferencedByExpression() {
// Local fixture: parses V and E, wraps them in a Neigh-of-from application as
// the top expression, and calls the utility directly (not via a rewriter name).
class RVReferencedTestData extends TestData {
private String V, E;
private Expression exprV, exprE, topE;
public RVReferencedTestData(String contextualConstraint, Model model, String V, String E, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.V = V;
this.E = E;
};
@Override
public Expression getTopExpression() {
this.exprV = parse(V);
this.exprE = parse(E);
topE = Expressions.make(LPIUtil.FUNCTOR_NEIGHBORS_OF_FROM, exprV, exprE);
return topE;
}
@Override
public Expression callRewrite(RewritingProcess process) {
return LPIUtil.randomVariableIsReferencedByExpression(exprV, exprE, process);
}
};
TestData[] tests = new TestData[] {
// Basic: Known true
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(fred)]", "if p(fred) and q(tom, jerry) and r then 1 else 0",
false,
"true"),
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(Z)]", "if p(Z) and q(X,Y) and r then 1 else 0",
false,
"true"),
// Basic: Known false
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(fred)]", "if p(tom) and q(tom, jerry) and r then 1 else 0",
false,
"false"),
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(Z)]", "if A then B else C",
false,
"false"),
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(Z)]", "if [A] then [B] else [C]",
false,
"false"),
// Are false because subexpressions of a bracketed expression are just the Variables
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(fred)]", "[if p(fred) and q(tom, jerry) and r then 1 else 0]",
false,
"false"),
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(Z)]", "[if p(Z) and q(X,Y) and r then 1 else 0]",
false,
"false"),
// Basic: Undetermined.
// Occurrence depends on free-variable equality, so a condition is returned.
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(W)]", "if p(Z) and q(X,Y) and r then 1 else 0",
false,
"W = Z"),
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[p(W)]", "if p(Z) and p(X) and r then 1 else 0",
false,
"W = Z or W = X"),
// Basic: 0 arity random variable.
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[r]", "if A then 1 else 0",
false,
"false"),
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[r]", "if p(Z) and q(X,Y) and r then 1 else 0",
false,
"true"),
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[r]", "if p(Z) and q(X,Y) then 1 else 0",
false,
"false"),
// s is not a random variable in this model.
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[s(fred)]", "if s(fred) and q(tom, jerry) and r then 1 else 0",
true,
"N/A"),
// Not bracketed, i.e. is the value expression for the random variable.
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"p(fred)", "if p(fred) and q(tom, jerry) and r then 1 else 0",
true,
"N/A"),
// the Variable A is not a Random Variable.
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"A", "if p(fred) and q(tom, jerry) and r then 1 else 0",
true,
"N/A"),
// Not allowed to use a Variable in place of the random variable's value expression
new RVReferencedTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"[ A ]", "if p(fred) and q(tom, jerry) and r then 1 else 0",
true,
"N/A"),
};
perform(tests);
}
// Exercises the R_intensional_simplification rewriter: simplifying intensional
// uni/multisets — dropping false conditions, eliminating indices bound by
// equalities in the condition, and lifting conditions out as conditionals.
@Test
public void testIntensionalSimplification() {
// Local fixture: parses the intensional set S and invokes R_intensional_simplification.
class IntensionalSimplificationTestData extends TestData {
private String S;
private Expression exprS;
public IntensionalSimplificationTestData(String contextualConstraint, Model model, String S, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.S = S;
};
@Override
public Expression getTopExpression() {
this.exprS = parse(S);
return exprS;
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_intensional_simplification, exprS);
}
};
TestData[] tests = new TestData[] {
// Basic: No changes expected for intensional unisets
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X) f(X) | X != a}",
false,
"{(on X) f(X) | X != a}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | X != a}",
false,
"{(on X,Y) f(X, Y) | X != a}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | (M = N) and (a = X = R)}",
false,
"{(on X,Y) f(X, Y) | M = N and a = X = R}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | (a = X = R) and (M = N)}",
false,
"{(on X,Y) f(X, Y) | a = X = R and M = N}"),
// Basic: No changes expected for intensional multisets
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X) f(X) | X != a}}",
false,
"{{(on X) f(X) | X != a}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | X != a}}",
false,
"{{(on X,Y) f(X, Y) | X != a}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | (M = N) and (a = X = R)}}",
false,
"{{(on X,Y) f(X, Y) | M = N and a = X = R}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | (a = X = R) and (M = N)}}",
false,
"{{(on X,Y) f(X, Y) | a = X = R and M = N}}"),
// Basic: Simple argument cases for intensional unisets.
// if C is false"
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X) f(X) | false}",
false,
"{}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X) f(X) | a = b}",
false,
"{}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{f(X) | false}",
false,
"{}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{f(X) | a = b}",
false,
"{}"),
// if I is empty
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{f(X) | a = a}",
false,
"{f(X)}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{f(X) | Y = Z}",
false,
"if Y = Z then {f(X)} else {}"),
// if C is (C' and i = Beta) for i an index in I
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X) f(X) | Z = X}",
false,
"{f(Z)}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X) | Z = X}",
false,
"{(on Y) f(Z)}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | Z = X}",
false,
"{(on Y) f(Z, Y)}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | a = X}",
false,
"{(on Y) f(a, Y)}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | (M = N) and (a = X)}",
false,
"{(on Y) f(a, Y) | M = N}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | (a = X) and (M = N)}",
false,
"{(on Y) f(a, Y) | M = N}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | and(a = X, M = N, K = L)}",
false,
"{(on Y) f(a, Y) | M = N and K = L}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | and(M = N, a = X, K = L)}",
false,
"{(on Y) f(a, Y) | M = N and K = L}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{(on X,Y) f(X, Y) | and(M = N, K = L, a = X)}",
false,
"{(on Y) f(a, Y) | M = N and K = L}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"{(on X in {a,b,c}) [if p(X) then 1 else 0] | X = a }",
false,
"{ ([ if p(a) then 1 else 0 ]) }"),
// Basic: Simple argument cases for intensional multisets.
// if C is false"
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X) f(X) | false}}",
false,
"{}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X) f(X) | a = b}}",
false,
"{}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{f(X) | false}}",
false,
"{}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{f(X) | a = b}}",
false,
"{}"),
// if I is empty
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{f(X) | a = a}}",
false,
"{{f(X)}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{f(X) | Y = Z}}",
false,
"if Y = Z then {{f(X)}} else {}"),
// if C is (C' and i = Beta) for i an index in I
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X) f(X) | Z = X}}",
false,
"{{f(Z)}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X) | Z = X}}",
false,
"{{(on Y) f(Z)}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | Z = X}}",
false,
"{{(on Y) f(Z, Y)}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | a = X}}",
false,
"{{(on Y) f(a, Y)}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | (M = N) and (a = X)}}",
false,
"{{(on Y) f(a, Y) | M = N}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | (a = X) and (M = N)}}",
false,
"{{(on Y) f(a, Y) | M = N}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | and(a = X, M = N, K = L)}}",
false,
"{{(on Y) f(a, Y) | M = N and K = L}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | and(M = N, a = X, K = L)}}",
false,
"{{(on Y) f(a, Y) | M = N and K = L}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{(on X,Y) f(X, Y) | and(M = N, K = L, a = X)}}",
false,
"{{(on Y) f(a, Y) | M = N and K = L}}"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"{{(on X in {a,b,c}) [if p(X) then 1 else 0] | X = a }}",
false,
"{{ ([ if p(a) then 1 else 0 ]) }}"),
// Illegal-argument cases: non-set and extensional-set inputs are rejected.
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"1",
true,
"N/A"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{a}",
true,
"N/A"),
new IntensionalSimplificationTestData(Expressions.TRUE.toString(), null,
"{{a, a, b}}",
true,
"N/A"),
};
perform(tests);
}
// Exercises the R_prod_m_and_prod_factor rewriter: multiplying a previously
// computed message m by a remaining product-of-factor-messages expression Pi.
@Test
public void testProductMessageAndProductFactor() {
// Local fixture: parses m, Pi, and the being-computed tuple; the top expression
// is the product m * Pi, and the rewrite is invoked with the three parts.
class PMAPFTestData extends TestData {
private String m, Pi, beingComputed;
private Expression exprM, exprPi, exprBeingComputed;
public PMAPFTestData(String contextualConstraint, Model model, String m, String Pi, String beingComputed, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.m = m;
this.Pi = Pi;
this.beingComputed = beingComputed;
};
@Override
public Expression getTopExpression() {
this.exprM = parse(m);
this.exprPi = parse(Pi);
this.exprBeingComputed = parse(beingComputed);
return Times.make(Arrays.asList(new Expression[] {exprM, exprPi}));
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_prod_m_and_prod_factor,
LPIUtil.argForProductMessageAndProductFactorRewriteCall(exprM, exprPi, exprBeingComputed));
}
};
TestData[] tests = new TestData[] {
// Basic: Straight forward cases
new PMAPFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"if r then 1 else 0",
"product({{ (on F in { [if r and (p(X) or not p(X)) then 0.2 else 0.3] }) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if r then 1 else 0"
),
new PMAPFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"if r then 0.7 else 0.3",
"product({{ (on F in { [if r and (p(X) or not p(X)) then 0.2 else 0.3] }) (message to [r] from F) }} )",
LPIUtil.createNewBeingComputedExpression().toString(),
false,
"if r then 0.28 else 0.18" // this is (if r then 0.7 else 0.3)*(if r then 0.4 else 0.6)
),
// Illegal-argument case: Pi is not a product expression.
new PMAPFTestData(Expressions.TRUE.toString(), new TrivialPRWithNonDeterministicFactor(),
"if r then 0.7 else 0.3",
"if r then 0.7 else 0.3", // invalid argument
LPIUtil.createNewBeingComputedExpression().toString(),
true,
"N/A"
),
};
perform(tests);
}
// Exercises the R_neigh_f rewriter: computing the set of random variables
// neighboring a given (bracketed) factor expression.
@Test
public void testNeighboursFactor() {
// Local fixture: parses the Neigh([...]) expression and invokes R_neigh_f.
class NeighboursFactorTestData extends TestData {
private String neighF;
private Expression exprNeighF;
public NeighboursFactorTestData(String contextualConstraint, Model model, String neighF, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.neighF = neighF;
};
@Override
public Expression getTopExpression() {
this.exprNeighF = parse(neighF);
return this.exprNeighF;
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_neigh_f, exprNeighF);
}
};
TestData[] tests = new TestData[] {
// Basic: Tests brought over from ALBPTest.testNeighborsOfFactor()
new NeighboursFactorTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"Neigh([if p(X) and q(X,Y) then 1 else 0])",
false,
"{ ([ p(X) ]), ([ q(X, Y) ]) }"),
// Result is conditional: p(X) and p(Y) coincide when X = Y.
new NeighboursFactorTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"Neigh([if p(X) and p(Y) and r then 1 else 0])",
false,
"if X = Y then { ([ p(X) ]), ([ r ]) } else { ([ p(X) ]), ([ p(Y) ]), ([ r ]) }"),
// a and b are not random variables in this model, so no neighbors.
new NeighboursFactorTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"Neigh([if a(X) and b(X,Y) then 1 else 0])",
false,
"{ }"),
new NeighboursFactorTestData(Expressions.TRUE.toString(), new TrivialPQPeoplea1Anda2(),
"Neigh([if q(X) then if p then 1 else 0 else if p then 0 else 1])",
false,
// Ensures duplicates are not returned.
"{ ([ q(X) ]), ([ p ]) }"),
// Illegal-argument case: argument is not a Neigh application.
new NeighboursFactorTestData(Expressions.TRUE.toString(), new TrivialPQR(),
"if p(X) and q(X,Y) then 1 else 0",
true,
"N/A"),
};
perform(tests);
}
// Exercises the R_normalize rewriter: normalizing a message expression E so its
// values over the two truth values of random variable V sum to 1.
@Test
public void testNormalize() {
// Local fixture: parses V and E, wraps them in a "normalize" application as the
// top expression, and invokes R_normalize with both.
class NormalizeTestData extends TestData {
private String V, E;
private Expression exprV, exprE;
public NormalizeTestData(String contextualConstraint, Model model, String V, String E, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.V = V;
this.E = E;
};
@Override
public Expression getTopExpression() {
this.exprV = parse(V);
this.exprE = parse(E);
return Expressions.apply("normalize", exprV, exprE);
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_normalize, LPIUtil.argForNormalizeRewriteCall(exprV, exprE));
}
};
TestData[] tests = new TestData[] {
// Basic: Straight forward cases
// This rewriter assumes that messages have logical variables conditions
// on top, and separated from, random variable conditions.
// It also assumes that random variables are always instances of the
// random variable on which to normalize
// (that is, other random variables are not supposed to be present).
new NormalizeTestData(Expressions.TRUE.toString(),
new TrivialLoopyPQandb(),
"[p(X)]",
"if something then if p(X) then 10 else 90 else 90",
false,
"if something then if p(X) then 0.1 else 0.9 else 0.5"),
new NormalizeTestData(Expressions.TRUE.toString(),
new TrivialLoopyPQandb(),
"[p(X)]",
"1" /* could be any constant */,
false,
"0.5"),
new NormalizeTestData(Expressions.TRUE.toString(),
new TrivialLoopyPQandb(),
"[p(X)]",
"if something then 10 else 90",
false,
"0.5"), // we get 0.5 because the expression is constant in p(X)
new NormalizeTestData(Expressions.TRUE.toString(),
new TrivialLoopyPQandb(),
"[p(X)]",
"if X = a then if p(a) then 1 else 9 else if p(X) then 2 else 8",
false,
"if X = a then if p(a) then 0.1 else 0.9 else if p(X) then 0.2 else 0.8"),
};
perform(tests);
}
@Test
public void testBeliefForNonLoopyModels() {
class BeliefTestData extends TestData {
private String belief;
private Expression exprBelief;
private Map<Object, Object> globalObjects;
private LBPConfiguration.BeliefPropagationUpdateSchedule schedule;
public BeliefTestData(String contextualConstraint, Model model, String belief, boolean illegalArgumentTest, String expected) {
this(contextualConstraint, model, belief, null, illegalArgumentTest, expected);
};
public BeliefTestData(String contextualConstraint, Model model, String belief, Map<Object, Object> globalObjects, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.belief = belief;
this.globalObjects = globalObjects;
};
public void setUpdateSchedule(LBPConfiguration.BeliefPropagationUpdateSchedule schedule) {
this.schedule = schedule;
}
@Override
public Expression getTopExpression() {
this.exprBelief = parse(belief);
return this.exprBelief;
}
@Override
public Expression callRewrite(RewritingProcess process) {
if (globalObjects != null) {
process.getGlobalObjects().putAll(globalObjects);
}
LBPConfiguration configuration = LBPFactory.newLBPConfiguration();
configuration.setBeliefPropagationUpdateSchedule(schedule);
RewritingProcess lbpProcess = LBPFactory.newLBPProcess(process.getRootExpression(), configuration, process);
Expression belief = lbpProcess.rewrite(LBPRewriter.R_belief, exprBelief);
Expression roundedBelief = Expressions.roundToAGivenPrecision(belief, 9);
return roundedBelief;
}
};
BeliefTestData[] tests = new BeliefTestData[] {
new BeliefTestData(Expressions.TRUE.toString(),
new Model(
"union("
+ "{{(on X in People) [if sick(X) then 0.4 else 0.6]}}, "
+ "{{ [if sick(john) then 1 else 0] }}"
+ ")",
"sick"
),
"belief([sick(X)])",
false,
"if X = john then if sick(john) then 1 else 0 else if sick(X) then 0.4 else 0.6"),
new BeliefTestData(Expressions.TRUE.toString(),
new Model(
"union("
+ "{ [if epidemic then 0.1 else 0.9] }, "
+ "{{(on X in People) [if epidemic then if sick(X) then 0.4 else 0.6 else if sick(X) then 0.01 else 0.99]}}, "
+ "{{ (on X in People) [if sick(X) then 1 else 0] | X = person1 or X = person2 or X = person3 }},"
+ "{{ (on X in People) [if sick(X) then 0 else 1] | X != person1 and X != person2 and X != person3 }}"
+ ")",
"epidemic", "sick"
),
"belief([epidemic])",
// Util.map(parse("|People|"), new DefaultSymbol(20)),
false,
// Note: old R_basic result:
// "if epidemic then (0.0064 * 0.6 ^ (| People | - 3)) / (0.0064 * 0.6 ^ (| People | - 3) + 9E-7 * 0.99 ^ (| People | - 3)) else (9E-7 * 0.99 ^ (| People | - 3)) / (0.0064 * 0.6 ^ (| People | - 3) + 9E-7 * 0.99 ^ (| People | - 3))"
"if epidemic then 0.995339619 else 0.00466038114"),
new BeliefTestData(Expressions.TRUE.toString(),
new Model(
"union("
+ "{ [if epidemic then 0.1 else 0.9] }, "
+ "{{(on X in People) [if epidemic then if sick(X) then 0.4 else 0.6 else if sick(X) then 0.01 else 0.99]}}, "
+ "{{ (on X in People) [if sick(X) then 1 else 0] | X = person1 or X = person2 or X = person3 }},"
+ "{{ (on X in People) [if sick(X) then 0 else 1] | X != person1 and X != person2 and X != person3 }}" +
")",
"epidemic", "sick"
),
"belief([epidemic])",
Util.map(parse("| People |"), DefaultSymbol.createSymbol(20)),
false,
"if epidemic then 0.588128460 else 0.411871540"),
// Basic: Straight forward non-loopy (i.e. exact) cases
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPQWithPriors(),
"belief([p(X)])",
false,
"if p(X) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPQWithPriors(),
"belief([q(X)])",
false,
"if q(X) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialThereExistsPQWithPriors(),
"belief(['there exists Y : p(X0, Y)'(X)])",
false,
"if 'there exists Y : p(X0, Y)'(X) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialThereExistsPQWithPriors(),
"belief(['there exists Y : q(X0, Y)'(X)])",
false,
"if 'there exists Y : q(X0, Y)'(X) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new WeightedPQWithPriors(),
"belief([p(X)])",
false,
"if p(X) then 0.223300971 else 0.776699029"),
new BeliefTestData(Expressions.TRUE.toString(), new WeightedPQWithPriors(),
"belief([q(X)])",
false,
"if q(X) then 0.320388350 else 0.679611650"),
new BeliefTestData(Expressions.TRUE.toString(), new WeightedThereExistsPQWithPriors(),
"belief(['there exists Y : p(X0, Y)'(X)])",
false,
"if 'there exists Y : p(X0, Y)'(X) then 0.223300971 else 0.776699029"),
new BeliefTestData(Expressions.TRUE.toString(), new WeightedThereExistsPQWithPriors(),
"belief(['there exists Y : q(X0, Y)'(X)])",
false,
"if 'there exists Y : q(X0, Y)'(X) then 0.320388350 else 0.679611650"),
// From ALBPTest.testIntensionalFanIn()
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPQPeoplea1Anda2(),
"belief([ q(a1) ])",
false,
"if q(a1) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPQPeoplea1Anda2(),
"belief([ q(a2) ])",
false,
"if q(a2) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPQPeoplea1Anda2(),
"belief([ q(a3) ])",
false,
"if q(a3) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(),
new TrivialPQPeoplea1Anda2(),
"belief([ q(X) ])",
false,
// Note: old R_basic result:
// "if X = a1 then if q(a1) then 1 else 0 else if X = a2 then if q(a2) then 1 else 0 else if q(X) then 1 else 0"
"if X = a1 then if q(a1) then 1 else 0 else if X = a2 then if q(a2) then 1 else 0 else if q(X) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPQPeoplea1Anda2(),
"belief([ p ])",
false,
"if p then 1 else 0"),
// From ALBPTest.testBelief()
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"belief([tall(X)])",
false,
"0.5"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"belief([tall(a1)])",
false,
"0.5"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"belief([american(X)])",
false,
"if american(X) then 0.687500000 else 0.312500000"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"belief([american(a1)])",
false,
"if american(a1) then 0.687500000 else 0.312500000"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"belief([intelligent(X)])",
false,
"0.5"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"belief([intelligent(a1)])",
false,
"0.5"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"belief([unintelligent(X)])",
false,
"0.5"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"belief([unintelligent(a1)])",
false,
"0.5"),
// From ALBPTest.testExponentiatedLifted()
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicAndSickNotbob(),
"belief([ epidemic ])",
false,
// Note: old R_basic result:
// "if epidemic then 1 / (1 + 0.8 ^ (|People| - 1)) else 0.8 ^ (|People| - 1) / (1 + 0.8 ^ (|People| - 1))"
"if epidemic then 0.881664935 else 0.118335065"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicAndSickNotbob(),
"belief([ sick(X) ])",
false,
// Note: old R_formula_simplification result before R_simplify used instead
// Difference is because | People | -> 10 and new result is this expression calculated correctly with that.
// "if X != bob then if sick(X) then (0.4 * 0.8 ^ (|People| - 2) + 0.6) / (0.4 * 0.8 ^ (|People| - 2) + 1 + 0.4 * 0.8 ^ (|People| - 2)) else (0.4 * 0.8 ^ (|People| - 2) + 0.4) / (0.4 * 0.8 ^ (|People| - 2) + 1 + 0.4 * 0.8 ^ (|People| - 2)) else 0.5"
"if X != bob then if sick(X) then 0.588166494 else 0.411833506 else 0.500000000"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicAndSickNotbob(),
"belief([ sick(ann) ])",
false,
// Note: old R_basic result:
// "if sick(ann) then (0.4 * 0.8 ^ (|People| - 2) + 0.6) / (0.4 * 0.8 ^ (|People| - 2) + 1 + 0.4 * 0.8 ^ (|People| - 2)) else (0.4 * 0.8 ^ (|People| - 2) + 0.4) / (0.4 * 0.8 ^ (|People| - 2) + 1 + 0.4 * 0.8 ^ (|People| - 2))"
"if sick(ann) then 0.588166494 else 0.411833506"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicAndSickNotbob(),
"belief([ sick(bob) ])",
false,
"0.5"),
// From ALBPTest.testExponentiatedLifted2()
new BeliefTestData(Expressions.TRUE.toString(), new TrivialGaveTreasureToOwnsRich() ,
"belief([rich(bob)])",
false,
// Note: old R_basic result:
// "if rich(bob) then (1 + 2 ^ |People|) ^ |Treasure| / ((1 + 2 ^ |People|) ^ |Treasure| + 1) else 1 / ((1 + 2 ^ |People|) ^ |Treasure| + 1)"
"if rich(bob) then 1 else 0.000000000000000000000000000000781198402"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialGaveTreasureToOwnsRich(),
"belief([rich(X)])",
false,
// Note: old R_basic result
// "if rich(X) then (1 + 2 ^ |People|) ^ |Treasure| / ((1 + 2 ^ |People|) ^ |Treasure| + 1) else 1 / ((1 + 2 ^ |People|) ^ |Treasure| + 1)"
"if rich(X) then 1 else 0.000000000000000000000000000000781198402"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialGaveTreasureToOwnsRich(),
"belief([gaveTreasureTo(X,Z,Y)])",
false,
// Note: old R_basic and R_formula_simlification result:
// "if |People| > 0 then if gaveTreasureTo(X, Z, Y) then (2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) else (1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) else if gaveTreasureTo(X, Z, Y) then (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) else (1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1))"
"if gaveTreasureTo(X, Z, Y) then 0.499512195 else 0.500487805"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialGaveTreasureToOwnsRich(),
"belief([owns(X,Y)])",
false,
// Note: old R_basic and R_formula_simlification result:
//"if owns(X, Y) then (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1)) else (1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1))"
"if owns(X, Y) then 0.999024390 else 0.000975609756"),
// From ALBPTest.testQueryVariableSplitting()
new BeliefTestData(Expressions.TRUE.toString(), new TrivialSickbob(),
"belief([sick(Person)])",
false,
"if Person = bob then if sick(bob) then 0.8 else 0.2 else 0.5"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialSickbob(),
"belief([sick(bob)])",
false,
"if sick(bob) then 0.8 else 0.2"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialSickbob(),
"belief([sick(ann)])",
false,
"0.5"),
// From ALBPTest.testMultiLevelMessagePassing()
new BeliefTestData(Expressions.TRUE.toString(), new TrivialSickSmokerbob(),
"belief([smoker(Person)])",
false,
// Note: old R_basic result:
// "if Person = bob then if smoker(bob) then 0.15 else 0.85 else if smoker(Person) then 0.1 else 0.9"
"if Person = bob then if smoker(bob) then 0.150943396 else 0.849056604 else if smoker(Person) then 0.100000000 else 0.900000000"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialSickSmokerbob(),
"belief([smoker(bob)])",
false,
"if smoker(bob) then 0.150943396 else 0.849056604"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialSickSmokerbob(),
"belief([sick(X)])",
false,
// Note: old R_basic result:
// "if X = bob then if sick(bob) then 1 else 0 else if sick(X) then 0.53 else 0.47"
// i.e. no constraint applier used, so sick(bob) is sick(X)
"if X = bob then if sick(bob) then 1 else 0 else if sick(X) then 0.530000000 else 0.470000000"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialSickSmokerbob(),
"belief([sick(bob)])",
false,
"if sick(bob) then 1 else 0"),
// From ALBPTest.testEpidemic()
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickbob(),
"belief([ epidemic ])",
false,
"if epidemic then 0.28 else 0.72"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickbob(),
"belief([ sick(X) ])",
false,
"if X = bob then if sick(bob) then 1 else 0 else if sick(X) then 0.34 else 0.66"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickbob(),
"belief([ sick(ann) ])",
false,
"if sick(ann) then 0.34 else 0.66"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickbob(),
"belief([ sick(bob) ])",
false,
"if sick(bob) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryone(),
"belief([ epidemic ])",
false,
// Note: old R_basic result:
// "if epidemic then (0.1 * 0.7 ^ |People|) / (0.1 * 0.7 ^ |People| + 0.9 * 0.2 ^ |People|) else (0.9 * 0.2 ^ |People|) / (0.1 * 0.7 ^ |People| + 0.9 * 0.2 ^ |People|)"
"if epidemic then 0.999967375 else 0.0000326248029"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryone(),
"belief([ sick(X) ])",
false,
"if sick(X) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryone(),
"belief([ sick(ann) ])",
false,
"if sick(ann) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryoneNotbobAmaryAjohn(),
"belief([ epidemic ])",
false,
// Note: old R_basic result:
// "if epidemic then (0.03 * 0.7 ^ (|People| - 3)) / (0.03 * 0.7 ^ (|People| - 3) + 0.72 * 0.2 ^ (|People| - 3)) else (0.72 * 0.2 ^ (|People| - 3)) / (0.03 * 0.7 ^ (|People| - 3) + 0.72 * 0.2 ^ (|People| - 3))"
"if epidemic then 0.996283639 else 0.00371636130"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryoneNotbobAmaryAjohn(),
"belief([ sick(X) ])",
false,
// Note: old R_basic result:
// "if X != bob and X != mary and X != john then if sick(X) then 1 else 0 else if X = bob then if sick(bob) then 0 else 1 else if sick(X) then (0.14 * 0.20 ^ (| People | - 3) + 0.021 * 0.70 ^ (| People | - 3)) / (0.14 * 0.20 ^ (| People | - 3) + 0.021 * 0.70 ^ (| People | - 3) + 0.58 * 0.20 ^ (| People | - 3) + 0.0090 * 0.70 ^ (| People | - 3)) else (0.58 * 0.20 ^ (| People | - 3) + 0.0090 * 0.70 ^ (| People | - 3)) / (0.14 * 0.20 ^ (| People | - 3) + 0.021 * 0.70 ^ (| People | - 3) + 0.58 * 0.20 ^ (| People | - 3) + 0.0090 * 0.70 ^ (| People | - 3))"
// Note: the previous equation at the end would have given:
// 'if sick(X) then 0.69812117289589 else 0.30187882710411'
// when calculated. We now calculate:
// 'if sick(X) then 0.698141819 else 0.301858181'
// for the equivalent branch. The difference is because the old
// result is rounded to two decimal places and the difference
// in the answers is because of this.
"if X != bob and X != mary and X != john then if sick(X) then 1 else 0 else if X = bob then if sick(bob) then 0 else 1 else if sick(X) then 0.698141819 else 0.301858181"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryoneNotbobAmaryAjohn(),
"belief([ sick(ann) ])",
false,
"if sick(ann) then 1 else 0"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryoneNotbobAmaryAjohn(),
"belief([ sick(bob) ])",
false,
"if sick(bob) then 0 else 1"),
new BeliefTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryoneNotbobAmaryAjohn(),
"belief([ sick(mary) ])",
false,
// Note: old R_basic result:
// "if sick(mary) then (0.14 * 0.2 ^ (| People | - 3) + 0.021 * 0.7 ^ (| People | - 3)) / (0.14 * 0.2 ^ (| People | - 3) + 0.021 * 0.7 ^ (| People | - 3) + 0.58 * 0.2 ^ (| People | - 3) + 0.009 * 0.7 ^ (| People | - 3)) else (0.58 * 0.2 ^ (| People | - 3) + 0.009 * 0.7 ^ (| People | - 3)) / (0.14 * 0.2 ^ (| People | - 3) + 0.021 * 0.7 ^ (| People | - 3) + 0.58 * 0.2 ^ (| People | - 3) + 0.009 * 0.7 ^ (| People | - 3))"
"if sick(mary) then 0.698141819 else 0.301858181"),
new BeliefTestData(Expressions.TRUE.toString(),
new Model(
"partition("
+ "{{(on X in People) [if smokes(X) then if cancer(X) then 0.9 else 0.1 else 1] }}, "
+ "{{ [if smokes(john) then 1 else 0] }}, "
+ "{{ (on X in People) [if smokes(X) then 0.7 else 0.3] }})",
"smokes", "cancer"
/**
* The point of this example is to test short-circuiting.
* When we query cancer(john), messages about smokes(john) comes from two different parfactors.
* The first one, [if smokes(john) then 1 else 0], is deterministic and makes the second message irrelevant.
* Therefore, this second message does not need to be computed at all.
*/
),
"belief([cancer(john)])",
false,
"if cancer(john) then 0.9 else 0.1"),
// From ALBPTest.testCSI()
new BeliefTestData(Expressions.TRUE.toString(), new TrivialSunnyAvailableCanPlayWith(),
"belief([canPlayWith(X)])",
false,
"if canPlayWith(X) then 0 else 1"),
// A model that looks loopy but isn't.
new BeliefTestData(Expressions.TRUE.toString(),
new Model(
"union(" +
"{{ (on X) [if p(X) and q(X) then 2 else 3] | X = a }}," +
"{{ (on X) [if q(X) and p(X) then 10 else 20] | X != a }}" +
")",
"p", "q"
),
"belief([p(W)])",
false,
"if W = a then if p(a) then 0.454545455 else 0.545454545 else if p(W) then 0.428571429 else 0.571428571"),
};
// Run non-loopy tests for each kind of schedule currently supported
LBPConfiguration.BeliefPropagationUpdateSchedule[] schedules = new LBPConfiguration.BeliefPropagationUpdateSchedule[] {
LBPConfiguration.BeliefPropagationUpdateSchedule.ASYNCHRONOUS_INDIVIDUAL_BASED_CYCLE_DETECTION,
LBPConfiguration.BeliefPropagationUpdateSchedule.SYNCHRONOUS
};
for (LBPConfiguration.BeliefPropagationUpdateSchedule schedule : schedules) {
System.out.println("---- RUNNING TESTS UNDER SCHEDULE : "+schedule);
for (BeliefTestData beliefTestData : tests) {
beliefTestData.setUpdateSchedule(schedule);
}
perform(tests);
}
}
// Verifies R_belief on non-loopy models when domain (type) sizes are NOT known to the
// engine: expected results are symbolic expressions in cardinalities such as
// "| type(X) |" instead of the numeric beliefs checked elsewhere in this suite.
@Test
public void testBeliefForNonLoopyModelsWithUnknownDomainSizes() {
// Local test-data holder: parses a belief query string, rewrites it with
// LBPRewriter.R_belief, and rounds the resulting expression's numeric constants to
// 9 significant digits so string comparison against 'expected' is stable.
class BeliefUnknownSizeTestData extends TestData {
private String belief; // unparsed belief query, e.g. "belief([ epidemic ])"
private Expression exprBelief; // parsed form of 'belief', cached by getTopExpression()
public BeliefUnknownSizeTestData(String contextualConstraint, Model model, String belief, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.belief = belief;
};
@Override
public Expression getTopExpression() {
// Parse lazily here so the model/grammar is set up before parsing occurs.
this.exprBelief = parse(belief);
return this.exprBelief;
}
@Override
public Expression callRewrite(RewritingProcess process) {
Expression roundedBelief = null;
Expression belief = process.rewrite(LBPRewriter.R_belief, exprBelief);
// Round to 9 digits: keeps long decimal constants in the symbolic output canonical.
roundedBelief = Expressions.roundToAGivenPrecision(belief, 9);
return roundedBelief;
}
};
TestData[] tests = new TestData[] {
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(),
new Model(
"union("
+ "{ [if epidemic then 0.1 else 0.9] }, "
+ "{{(on X in People) [if epidemic then if sick(X) then 0.4 else 0.6 else if sick(X) then 0.01 else 0.99]}}, "
+ "{{ (on X in People) [if sick(X) then 1 else 0] | X = person1 or X = person2 or X = person3 }},"
+ "{{ (on X in People) [if sick(X) then 0 else 1] | X != person1 and X != person2 and X != person3 }}" +
")",
"epidemic", "sick"
),
"belief([epidemic])",
false,
// Note: old R_basic result:
// "if epidemic then (0.0064 * 0.6 ^ (| People | - 3)) / (0.0064 * 0.6 ^ (| People | - 3) + 9E-7 * 0.99 ^ (| People | - 3)) else (9E-7 * 0.99 ^ (| People | - 3)) / (0.0064 * 0.6 ^ (| People | - 3) + 9E-7 * 0.99 ^ (| People | - 3))"
// calculated | type(X) | = 10 :
// if epidemic then 0.995339619 else 0.00466038114
// Note:
// (| type(X) | - | type(X) | - 3) = 3
// i.e. right associative:
// | type(X) | - ( |type(X) | - 3)
"if epidemic then (0.00640000000 * 0.600000000 ^ (| type(X) | - 3)) / (0.00640000000 * 0.600000000 ^ (| type(X) | - 3) + 0.000000900000000 * 0.990000000 ^ (| type(X) | - 3)) else (0.000000900000000 * 0.990000000 ^ (| type(X) | - 3)) / (0.00640000000 * 0.600000000 ^ (| type(X) | - 3) + 0.000000900000000 * 0.990000000 ^ (| type(X) | - 3))"),
//"if epidemic then (0.1 * 0.4 ^ (| type(X) | - (| type(X) | - 3)) * 0.6 ^ (| type(X) | - 3)) / (0.1 * 0.4 ^ (| type(X) | - (| type(X) | - 3)) * 0.6 ^ (| type(X) | - 3) + 0.9 * 0.01 ^ (| type(X) | - (| type(X) | - 3)) * 0.99 ^ (| type(X) | - 3)) else (0.9 * 0.01 ^ (| type(X) | - (| type(X) | - 3)) * 0.99 ^ (| type(X) | - 3)) / (0.1 * 0.4 ^ (| type(X) | - (| type(X) | - 3)) * 0.6 ^ (| type(X) | - 3) + 0.9 * 0.01 ^ (| type(X) | - (| type(X) | - 3)) * 0.99 ^ (| type(X) | - 3))"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialEpidemicAndSickNotbob(),
"belief([ epidemic ])",
false,
// Note: old R_basic result:
// "if epidemic then 1 / (1 + 0.8 ^ (|People| - 1)) else 0.8 ^ (|People| - 1) / (1 + 0.8 ^ (|People| - 1))"
// calculated | type(X) | = 10 :
// if epidemic then 0.881664935 else 0.118335065
"if epidemic then 1 / (1 + 0.8 ^ (| type(X) | - 1)) else 0.8 ^ (| type(X) | - 1) / (1 + 0.8 ^ (| type(X) | - 1))"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialEpidemicAndSickNotbob(),
"belief([ sick(X) ])",
false,
// Note: old R_formula_simplification result before R_simplify used instead
// Difference is because | People | -> 10 and new result is this expression calculated correctly with that.
// "if X != bob then if sick(X) then (0.4 * 0.8 ^ (|People| - 2) + 0.6) / (0.4 * 0.8 ^ (|People| - 2) + 1 + 0.4 * 0.8 ^ (|People| - 2)) else (0.4 * 0.8 ^ (|People| - 2) + 0.4) / (0.4 * 0.8 ^ (|People| - 2) + 1 + 0.4 * 0.8 ^ (|People| - 2)) else 0.5"
// calculated | type(.) | = 10 :
// if X != bob then if sick(X) then 0.588166494 else 0.411833506 else 0.500000000
"if X != bob then if sick(X) then (0.4 * 0.8 ^ (| type(X') | - 2) + 0.6) / (0.4 * 0.8 ^ (| type(X') | - 2) + 1 + 0.4 * 0.8 ^ (| type(X') | - 2)) else (0.4 * 0.8 ^ (| type(X') | - 2) + 0.4) / (0.4 * 0.8 ^ (| type(X') | - 2) + 1 + 0.4 * 0.8 ^ (| type(X') | - 2)) else 0.5"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialEpidemicAndSickNotbob(),
"belief([ sick(ann) ])",
false,
// Note: old R_basic result:
// "if sick(ann) then (0.4 * 0.8 ^ (|People| - 2) + 0.6) / (0.4 * 0.8 ^ (|People| - 2) + 1 + 0.4 * 0.8 ^ (|People| - 2)) else (0.4 * 0.8 ^ (|People| - 2) + 0.4) / (0.4 * 0.8 ^ (|People| - 2) + 1 + 0.4 * 0.8 ^ (|People| - 2))"
// calculated | type(.) | = 10 :
// if sick(ann) then 0.588166494 else 0.411833506
"if sick(ann) then (0.4 * 0.8 ^ (| type(X) | - 2) + 0.6) / (0.4 * 0.8 ^ (| type(X) | - 2) + 1 + 0.4 * 0.8 ^ (| type(X) | - 2)) else (0.4 * 0.8 ^ (| type(X) | - 2) + 0.4) / (0.4 * 0.8 ^ (| type(X) | - 2) + 1 + 0.4 * 0.8 ^ (| type(X) | - 2))"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialGaveTreasureToOwnsRich() ,
"belief([rich(bob)])",
false,
// Note: old R_basic result:
// "if rich(bob) then (1 + 2 ^ |People|) ^ |Treasure| / ((1 + 2 ^ |People|) ^ |Treasure| + 1) else 1 / ((1 + 2 ^ |People|) ^ |Treasure| + 1)"
// calculated | type(.) | = 10 :
// if rich(bob) then 1 else 0.000000000000000000000000000000781198402
"if rich(bob) then (1 + 2 ^ | type(X) |) ^ | type(Z) | / ((1 + 2 ^ | type(X) |) ^ | type(Z) | + 1) else 1 / ((1 + 2 ^ | type(X) |) ^ | type(Z) | + 1)"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialGaveTreasureToOwnsRich(),
"belief([rich(X)])",
false,
// Note: old R_basic result
// "if rich(X) then (1 + 2 ^ |People|) ^ |Treasure| / ((1 + 2 ^ |People|) ^ |Treasure| + 1) else 1 / ((1 + 2 ^ |People|) ^ |Treasure| + 1)"
// calculated | type(.) | = 10 :
// if rich(X) then 1 else 0.000000000000000000000000000000781198402
"if rich(X) then (1 + 2 ^ | type(X') |) ^ | type(Z) | / ((1 + 2 ^ | type(X') |) ^ | type(Z) | + 1) else 1 / ((1 + 2 ^ | type(X') |) ^ | type(Z) | + 1)"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialGaveTreasureToOwnsRich(),
"belief([gaveTreasureTo(X,Z,Y)])",
false,
// Note: old R_basic and R_formula_simlification result:
// "if |People| > 0 then if gaveTreasureTo(X, Z, Y) then (2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) else (1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ (|People| - 1) * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) else if gaveTreasureTo(X, Z, Y) then (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) else (1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1))"
// calculated | type(.) | = 10 :
// if gaveTreasureTo(X, Z, Y) then 0.499512195 else 0.500487805
// Note: CardinaliltyOfTypeAlwaysDistinctFromZero removes
// the top conditional as its true in this context
"if gaveTreasureTo(X, Z, Y) then (2 ^ (| type(X') | - 1) * (1 + 2 ^ | type(X) |) ^ (| type(Z') | - 1)) / (2 ^ (| type(X') | - 1) * (1 + 2 ^ | type(X) |) ^ (| type(Z') | - 1) + 1 + (1 + 2 ^ | type(X) |) ^ (| type(Z') | - 1) + 2 ^ (| type(X') | - 1) * (1 + 2 ^ | type(X) |) ^ (| type(Z') | - 1)) else (1 + (1 + 2 ^ | type(X) |) ^ (| type(Z') | - 1) + 2 ^ (| type(X') | - 1) * (1 + 2 ^ | type(X) |) ^ (| type(Z') | - 1)) / (2 ^ (| type(X') | - 1) * (1 + 2 ^ | type(X) |) ^ (| type(Z') | - 1) + 1 + (1 + 2 ^ | type(X) |) ^ (| type(Z') | - 1) + 2 ^ (| type(X') | - 1) * (1 + 2 ^ | type(X) |) ^ (| type(Z') | - 1))"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialGaveTreasureToOwnsRich(),
"belief([owns(X,Y)])",
false,
// Note: old R_basic and R_formula_simlification result:
//"if owns(X, Y) then (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1)) else (1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1)) / (2 ^ |People| * (1 + 2 ^ |People|) ^ (|Treasure| - 1) + 1 + (1 + 2 ^ |People|) ^ (|Treasure| - 1))"
// calculated | type(.) | = 10 :
// if owns(X, Y) then 0.999024390 else 0.000975609756
"if owns(X, Y) then (2 ^ | type(X') | * (1 + 2 ^ | type(X') |) ^ (| type(Z) | - 1)) / (2 ^ | type(X') | * (1 + 2 ^ | type(X') |) ^ (| type(Z) | - 1) + 1 + (1 + 2 ^ | type(X') |) ^ (| type(Z) | - 1)) else (1 + (1 + 2 ^ | type(X') |) ^ (| type(Z) | - 1)) / (2 ^ | type(X') | * (1 + 2 ^ | type(X') |) ^ (| type(Z) | - 1) + 1 + (1 + 2 ^ | type(X') |) ^ (| type(Z) | - 1))"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryone(),
"belief([ epidemic ])",
false,
// Note: old R_basic result:
// "if epidemic then (0.1 * 0.7 ^ |People|) / (0.1 * 0.7 ^ |People| + 0.9 * 0.2 ^ |People|) else (0.9 * 0.2 ^ |People|) / (0.1 * 0.7 ^ |People| + 0.9 * 0.2 ^ |People|)"
// calculated | type(.) | = 10 :
// if epidemic then 0.999967375 else 0.0000326248029
"if epidemic then (0.1 * 0.7 ^ | type(X) |) / (0.1 * 0.7 ^ | type(X) | + 0.9 * 0.2 ^ | type(X) |) else (0.9 * 0.2 ^ | type(X) |) / (0.1 * 0.7 ^ | type(X) | + 0.9 * 0.2 ^ | type(X) |)"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryoneNotbobAmaryAjohn(),
"belief([ epidemic ])",
false,
// Note: old R_basic result:
// "if epidemic then (0.03 * 0.7 ^ (|People| - 3)) / (0.03 * 0.7 ^ (|People| - 3) + 0.72 * 0.2 ^ (|People| - 3)) else (0.72 * 0.2 ^ (|People| - 3)) / (0.03 * 0.7 ^ (|People| - 3) + 0.72 * 0.2 ^ (|People| - 3))"
// if epidemic then 0.996283639 else 0.00371636130
// calculated | type(.) | = 10 :
// "if epidemic then (0.03 * 0.7 ^ (| type(X) | - 3)) / (0.03 * 0.7 ^ (| type(X) | - 3) + 0.72 * 0.2 ^ (| type(X) | - 3)) else (0.72 * 0.2 ^ (| type(X) | - 3)) / (0.03 * 0.7 ^ (| type(X) | - 3) + 0.72 * 0.2 ^ (| type(X) | - 3))"
"if epidemic then (0.0300000000 * 0.700000000 ^ (| type(X) | - 3)) / (0.0300000000 * 0.700000000 ^ (| type(X) | - 3) + 0.720000000 * 0.200000000 ^ (| type(X) | - 3)) else (0.720000000 * 0.200000000 ^ (| type(X) | - 3)) / (0.0300000000 * 0.700000000 ^ (| type(X) | - 3) + 0.720000000 * 0.200000000 ^ (| type(X) | - 3))"),
//"if epidemic then (0.100000000 * 0.700000000 ^ (| type(X) | - 3) * 0.300000000 ^ (| type(X) | - ((| type(X) | - 1 + | type(X) | - 3) - (| type(X) | - 3)))) / (0.100000000 * 0.700000000 ^ (| type(X) | - 3) * 0.300000000 ^ (| type(X) | - ((| type(X) | - 1 + | type(X) | - 3) - (| type(X) | - 3))) + 0.900000000 * 0.200000000 ^ (| type(X) | - 3) * 0.800000000 ^ (| type(X) | - ((| type(X) | - 1 + | type(X) | - 3) - (| type(X) | - 3)))) else (0.900000000 * 0.200000000 ^ (| type(X) | - 3) * 0.800000000 ^ (| type(X) | - ((| type(X) | - 1 + | type(X) | - 3) - (| type(X) | - 3)))) / (0.100000000 * 0.700000000 ^ (| type(X) | - 3) * 0.300000000 ^ (| type(X) | - ((| type(X) | - 1 + | type(X) | - 3) - (| type(X) | - 3))) + 0.900000000 * 0.200000000 ^ (| type(X) | - 3) * 0.800000000 ^ (| type(X) | - ((| type(X) | - 1 + | type(X) | - 3) - (| type(X) | - 3))))"),
// TODO-not supported when using R_complete_simplify on checking if a branch reachable or not.
// new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryoneNotbobAmaryAjohn(),
// "belief([ sick(X) ])",
// false,
// // Note: old R_basic result:
// // "if X != bob and X != mary and X != john then if sick(X) then 1 else 0 else if X = bob then if sick(bob) then 0 else 1 else if sick(X) then (0.14 * 0.20 ^ (| People | - 3) + 0.021 * 0.70 ^ (| People | - 3)) / (0.14 * 0.20 ^ (| People | - 3) + 0.021 * 0.70 ^ (| People | - 3) + 0.58 * 0.20 ^ (| People | - 3) + 0.0090 * 0.70 ^ (| People | - 3)) else (0.58 * 0.20 ^ (| People | - 3) + 0.0090 * 0.70 ^ (| People | - 3)) / (0.14 * 0.20 ^ (| People | - 3) + 0.021 * 0.70 ^ (| People | - 3) + 0.58 * 0.20 ^ (| People | - 3) + 0.0090 * 0.70 ^ (| People | - 3))"
// // calculated | type(.) | = 10 :
// // if X != bob and X != mary and X != john then if sick(X) then 1 else 0 else if X = bob then if sick(X) then 0 else 1 else if X = mary or X = john then if sick(X) then 0.698141819 else 0.301858181 else if sick(X) then 0.693556236 else 0.306443764
// "if X != bob and X != mary and X != john then if sick(X) then 1 else 0 else if X = bob then if sick(X) then 0 else 1 else if X = mary or X = john then if sick(X) then (0.144 * 0.2 ^ (| type(X') | - 3) + 0.021 * 0.7 ^ (| type(X') | - 3)) / (0.144 * 0.2 ^ (| type(X') | - 3) + 0.021 * 0.7 ^ (| type(X') | - 3) + 0.576 * 0.2 ^ (| type(X') | - 3) + 0.009 * 0.7 ^ (| type(X') | - 3)) else (0.576 * 0.2 ^ (| type(X') | - 3) + 0.009 * 0.7 ^ (| type(X') | - 3)) / (0.144 * 0.2 ^ (| type(X') | - 3) + 0.021 * 0.7 ^ (| type(X') | - 3) + 0.576 * 0.2 ^ (| type(X') | - 3) + 0.009 * 0.7 ^ (| type(X') | - 3)) else if sick(X) then (0.144 * 0.2 ^ (| type(X') | - 4) + 0.021 * 0.7 ^ (| type(X') | - 4)) / (0.144 * 0.2 ^ (| type(X') | - 4) + 0.021 * 0.7 ^ (| type(X') | - 4) + 0.576 * 0.2 ^ (| type(X') | - 4) + 0.009 * 0.7 ^ (| type(X') | - 4)) else (0.576 * 0.2 ^ (| type(X') | - 4) + 0.009 * 0.7 ^ (| type(X') | - 4)) / (0.144 * 0.2 ^ (| type(X') | - 4) + 0.021 * 0.7 ^ (| type(X') | - 4) + 0.576 * 0.2 ^ (| type(X') | - 4) + 0.009 * 0.7 ^ (| type(X') | - 4))"),
new BeliefUnknownSizeTestData(Expressions.TRUE.toString(), new TrivialEpidemicSickEveryoneNotbobAmaryAjohn(),
"belief([ sick(mary) ])",
false,
// Note: old R_basic result:
// "if sick(mary) then (0.14 * 0.2 ^ (| People | - 3) + 0.021 * 0.7 ^ (| People | - 3)) / (0.14 * 0.2 ^ (| People | - 3) + 0.021 * 0.7 ^ (| People | - 3) + 0.58 * 0.2 ^ (| People | - 3) + 0.009 * 0.7 ^ (| People | - 3)) else (0.58 * 0.2 ^ (| People | - 3) + 0.009 * 0.7 ^ (| People | - 3)) / (0.14 * 0.2 ^ (| People | - 3) + 0.021 * 0.7 ^ (| People | - 3) + 0.58 * 0.2 ^ (| People | - 3) + 0.009 * 0.7 ^ (| People | - 3))"
// calculated | type(.) | = 10 :
// if sick(mary) then 0.698141819 else 0.301858181
"if sick(mary) then (0.144 * 0.2 ^ (| type(X) | - 3) + 0.021 * 0.7 ^ (| type(X) | - 3)) / (0.144 * 0.2 ^ (| type(X) | - 3) + 0.021 * 0.7 ^ (| type(X) | - 3) + 0.576 * 0.2 ^ (| type(X) | - 3) + 0.009 * 0.7 ^ (| type(X) | - 3)) else (0.576 * 0.2 ^ (| type(X) | - 3) + 0.009 * 0.7 ^ (| type(X) | - 3)) / (0.144 * 0.2 ^ (| type(X) | - 3) + 0.021 * 0.7 ^ (| type(X) | - 3) + 0.576 * 0.2 ^ (| type(X) | - 3) + 0.009 * 0.7 ^ (| type(X) | - 3))"),
};
// Disable "all type sizes known" so beliefs remain symbolic in | type(.) | terms.
// NOTE(review): this property is set globally and not restored afterwards — presumably
// later tests reset it or rely on per-test configuration; verify if tests are reordered.
PRAiSEConfiguration.setProperty(PRAiSEConfiguration.KEY_MODEL_ALL_TYPE_SIZES_KNOWN, "false");
perform(tests);
}
// Verifies R_extract_previous_msg_sets: given a belief expansion containing
// "previous message to [..] from [..]" expressions, the rewriter must collect them into
// (possibly conditioned) intensional sets of (destination, origin) pairs, with the
// conditions reflecting the conditional branches the messages were found under.
@Test
public void testExtractPreviousMessageSets() {
// Local test-data holder: parses a belief-expansion expression and runs the
// R_extract_previous_msg_sets rewriter on it under the given contextual constraint.
class ExtractPreviousMessageSetsTestData extends TestData {
private String beliefExpansion; // unparsed belief-expansion expression under test
private Expression exprBeliefExpansion; // parsed form, cached by getTopExpression()
public ExtractPreviousMessageSetsTestData(String contextualConstraint, Model model, String beliefExpansion, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.beliefExpansion = beliefExpansion;
};
@Override
public Expression getTopExpression() {
// Parse lazily here so the model/grammar is set up before parsing occurs.
this.exprBeliefExpansion = parse(beliefExpansion);
return this.exprBeliefExpansion;
}
@Override
public Expression callRewrite(RewritingProcess process) {
return process.rewrite(LBPRewriter.R_extract_previous_msg_sets, exprBeliefExpansion);
}
}
TestData[] tests = new TestData[] {
// Basic:
// An intensional set without indices should be returned
new ExtractPreviousMessageSetsTestData("X = a",
new TrivialPQ(),
"previous message to [p(a)] from [ Beta ]",
false,
"{{ (on ) ([p(a)], [Beta]) | X = a }}"),
// An intensional set without indices should be returned
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"previous message to [p(X)] from [ Alpha ]",
false,
"{{ (on ) ([p(X)], [Alpha]) }}"),
// An intensional set should be returned
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"{{ (on X) previous message to [p(X)] from [ Alpha ] }}",
false,
"{{ (on X) ([p(X)], [Alpha]) }}"),
// An intensional set should be returned
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"previous message to [p(X)] from [ if p(Y) then 1 else 0 ]",
false,
"{{ (on ) ([p(X)], [if p(Y) then 1 else 0]) }}"),
// Example from pseudo-code
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"if X != a then (previous message to [p(X)] from [ Alpha ]) else (previous message to [p(a)] from [ Beta ])",
false,
"{{ (on ) ([p(X)], [Alpha]) | X != a}} union {{ (on ) ([p(a)], [Beta]) | not(X != a) }}"),
// Variants of example from pseudo-code
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"if X != a then (previous message to [p(X)] from [ Alpha ]) else 1",
false,
"{{ (on ) ([p(X)], [Alpha]) | X != a }}"),
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"if X != a then 1 else (previous message to [p(a)] from [ Beta ])",
false,
"{{ (on ) ([p(a)], [Beta]) | not (X != a) }}"),
// More than 1 level of nesting
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQR(),
"if X != a " +
"then (if Y != b then (previous message to [q(X, Y)] from [ Alpha1 ]) else 1) " +
"else (if Y != c then (previous message to [q(X, Y)] from [ Beta1 ]) else product({{ (on Y) (previous message to [q(X, Y)] from [ Beta2 ]) }} ) )",
false,
"{{ ( on ) ( ([ q(X, Y) ]), ([ Alpha1 ]) ) | X != a and Y != b }} union {{ ( on ) ( ([ q(X, Y) ]), ([ Beta1 ]) ) | not (X != a) and Y != c }} union {{ ( on Y ) ( ([ q(X, Y) ]), ([ Beta2 ]) ) | not (X != a) and not (Y != c) }}"),
// Embedded as a term in an arithmetic expression
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"if X != a then 1 else (1 + (previous message to [p(a)] from [ Beta ]))",
false,
"{{ ( on ) ( ([ p(a) ]), ([ Beta ]) ) | not (X != a) }}"),
// Embedded as a term in an exponentiation expression
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"if X != a then 1 else (previous message to [p(a)] from [ Beta ])^2",
false,
"{{ ( on ) ( ([ p(a) ]), ([ Beta ]) ) | not (X != a) }}"),
// Embedded a term in a product expression and sub conditional expression
new ExtractPreviousMessageSetsTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"product({{ (on A, D) if A != a then 1 else (previous message to [p(a)] from [ Beta ]) | A != D and (X = A or X = D) and A = X }})",
false,
"{{ ( on A, D ) ( ([ p(a) ]), ([ Beta ]) ) | A != D and (X = A or X = D) and A = X and not (A != a) }}"),
// Basic: Contextual Constraint Tests
// Test a false contextual constraint
new ExtractPreviousMessageSetsTestData(Expressions.FALSE.toString(),
new TrivialPQ(),
"previous message to [p(a)] from [ Beta ]",
false,
Rewriter.FALSE_CONTEXTUAL_CONTRAINT_RETURN_VALUE.toString()),
};
perform(tests);
}
// Tests MessageExpansions.getMessageExpansions(): given a union of singleton
// multisets of (destination, origin) message pairs, it returns the matching
// sets extended with a third element holding the message's expansion value.
// The data below also checks that alpha-equivalent duplicate union arguments
// are collapsed, and that malformed inputs (uni-set instead of multiset,
// non-singleton multiset, non-pair/non-tuple elements) are rejected.
@Test
public void testGetMessageExpansions() {
// Test-data holder: 'msgSets' is the union-of-message-sets expression handed
// to getMessageExpansions(); the inherited 'expected' field is the result.
class GetMessageExpansionsTestData extends TestData {
private String msgSets;
private Expression exprMsgSets;
public GetMessageExpansionsTestData(String contextualConstraint, Model model, String msgSets, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.msgSets = msgSets;
};
@Override
public Expression getTopExpression() {
// Parse lazily so the expression is built within the test's rewriting setup.
this.exprMsgSets = parse(msgSets);
return this.exprMsgSets;
}
@Override
public Expression callRewrite(RewritingProcess process) {
Expression messageExpansions = LBPFactory.newMessageExpansions(process).getMessageExpansions(exprMsgSets, process);
return messageExpansions;
}
}
TestData[] tests = new TestData[] {
// Basic:
// empty msgSets
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{ }",
false,
"{ }"
),
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{{ }}",
false,
"{ }"
),
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"union()",
false,
"{ }"
),
// R_msg_to_v_f
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{{ (on X) ([intelligent(X)], [if intelligent(X) <=> not unintelligent(X) then 1 else 0]) }}",
false,
"{{ (on X) ([intelligent(X)], [if intelligent(X) <=> not unintelligent(X) then 1 else 0], 1) }}"
),
// R_msg_to_f_v
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPQWithPriors(),
"{{ (on X) ([if p(X) and q(X) then 1 else 0], [p(X)]) }}",
false,
"{{ (on X) ([if p(X) and q(X) then 1 else 0], [p(X)], (if p(X) then 0.2 else 0.8)) }}"
),
// union msgSets
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{{ (on X) ([tall(X)], [if tall(X) then 2 else 8]) }} union {{ (on X) ([tall(X)], [if tall(X) and american(X) then 7 else 1]) }}",
false,
"{{ (on X) ([tall(X)], [if tall(X) then 2 else 8], (if tall(X) then 2 else 8)) }}" +
" union " +
"{{ (on X) ([tall(X)], [if tall(X) and american(X) then 7 else 1], (if tall(X) then 8 else 2)) }}"
),
// duplicate union arguments
// (the (on Y) set is alpha-equivalent to the first set, so the expected
// result contains only the two distinct expansions)
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{{ (on X) ([tall(X)], [if tall(X) then 2 else 8]) }} union {{ (on X) ([tall(X)], [if tall(X) and american(X) then 7 else 1]) }} union {{ (on Y) ([tall(Y)], [if tall(Y) then 2 else 8]) }}",
false,
"{{ (on X) ([tall(X)], [if tall(X) then 2 else 8], (if tall(X) then 2 else 8)) }}" +
" union " +
"{{ (on X) ([tall(X)], [if tall(X) and american(X) then 7 else 1], (if tall(X) then 8 else 2)) }}"
),
// not a multiset
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{ ([intelligent(a)], [if intelligent(a) <=> not unintelligent(a) then 1 else 0]) }",
true,
"{}"
),
// not a singleton multiset
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{{ ([intelligent(a)], [if intelligent(a) <=> not unintelligent(a) then 1 else 0]), ([intelligent(b)], [if intelligent(b) <=> not unintelligent(b) then 1 else 0]) }}",
true,
"{}"
),
// not a tuple
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{{ [intelligent(a)] }}",
true,
"{}"
),
// not a tuple pair
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{{ tuple([intelligent(a)]) }}",
true,
"{}"
),
// not a tuple with bracketed expressions
new GetMessageExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPeopleAmericanTallIntelligentUnintelligent(),
"{{ (intelligent(a), unintelligent(b)) }}",
true,
"{}"
),
};
perform(tests);
}
// Tests IterateValuesUsingExpansions.iterateValuesUsingExpansions(): one
// iteration step that recomputes message values from their expansions.
@Test
public void testIterateValuesUsingExpansions() {
// Holder pairing a set of current message values with the corresponding
// message expansions, both supplied as parseable strings.
class IterateValuesUsingExpansionsTestData extends TestData {
private final String msgValuesString;
private final String msgExpansionsString;
private Expression parsedMsgValues;
private Expression parsedMsgExpansions;
public IterateValuesUsingExpansionsTestData(String contextualConstraint, Model model, String msgValues, String msgExpansions, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.msgValuesString = msgValues;
this.msgExpansionsString = msgExpansions;
}
@Override
public Expression getTopExpression() {
// Parse both inputs and expose them as a single tuple so the harness
// has one top-level expression to work with.
parsedMsgValues = parse(msgValuesString);
parsedMsgExpansions = parse(msgExpansionsString);
return Tuple.make(Arrays.asList(parsedMsgValues, parsedMsgExpansions));
}
@Override
public Expression callRewrite(RewritingProcess process) {
return LBPFactory.newIterateValuesUsingExpansions(process).iterateValuesUsingExpansions(parsedMsgValues, parsedMsgExpansions, process);
}
}
TestData[] tests = new TestData[] {
// Basic:
new IterateValuesUsingExpansionsTestData(Expressions.TRUE.toString(),
new TrivialPQWithPArity2AndQArity1(),
"{{ (on X, Z) ( [p(X,a)], [if p(X,Z) and q(Z) then 1 else 0], ( if p(X,a) then 1 else 0 ) ) | X != c }}",
"{{ (on X, Z) ( [p(X,a)], [if p(X,Z) and q(Z) then 1 else 0], ( if Y != d then (previous message to [p(X,a)] from [if p(X,a) and q(Y) then 1 else 0]) else 0 ) ) | X != c }}",
false,
"{{ (on X, Z) ( [p(X,a)], [if p(X,Z) and q(Z) then 1 else 0], ( if Y != d then if p(X,a) then 1 else 0 else 0.5 ) ) | X != c }}"),
};
perform(tests);
}
// Tests UseValuesForPreviousMessages.useValuesForPreviousMessages(): each
// "previous message to [..] from [..]" subexpression inside 'expansion' is
// replaced by the matching value from the 'msgValues' sets, with the set's
// index variables instantiated to match the message's arguments.
@Test
public void testUseValuesForPreviousMessages() {
// Test-data holder: 'expansion' contains the previous-message references,
// 'msgValues' provides the (destination, origin, value) triples to use.
class UseValuesForPreviousMessagesTestData extends TestData {
private String expansion, msgValues;
private Expression exprExpansion, exprMsgValues;
public UseValuesForPreviousMessagesTestData(String contextualConstraint, Model model, String expansion, String msgValues, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.expansion = expansion;
this.msgValues = msgValues;
};
@Override
public Expression getTopExpression() {
// Both inputs are exposed to the harness as one top-level tuple.
exprExpansion = parse(expansion);
exprMsgValues = parse(msgValues);
Expression top = Tuple.make(Arrays.asList(exprExpansion, exprMsgValues));
return top;
}
@Override
public Expression callRewrite(RewritingProcess process) {
Expression result = LBPFactory.newUseValuesForPreviousMessages(process).useValuesForPreviousMessages(exprExpansion, exprMsgValues, process);
return result;
}
}
TestData[] tests = new TestData[] {
// Basic:
// Example 1 from pseudo-code (no longer in latest version of doc)
new UseValuesForPreviousMessagesTestData(Expressions.TRUE.toString(),
new TrivialPQWithPArity2AndQArity1(),
"if Y != d then (previous message to [p(b,Y)] from [if p(b,Y) and q(Y) then 1 else 0]) else 0",
"{{ (on X) ( [p(X,a)], [if p(X,Z) and q(Z) then 1 else 0], ( if p(X,Z) then 1 else 0 ) ) | X != c }}",
false,
"if Y != d then if p(b,Z) then 1 else 0 else 0"),
// Variant of Example 1 from the pseudo-code (on X, Z) instead of (on X)
new UseValuesForPreviousMessagesTestData(Expressions.TRUE.toString(),
new TrivialPQWithPArity2AndQArity1(),
"if Y != d then (previous message to [p(b,Y)] from [if p(b,Y) and q(Y) then 1 else 0]) else 0",
"{{ (on X, Z) ( [p(X,a)], [if p(X,Z) and q(Z) then 1 else 0], ( if p(X,Z) then 1 else 0 ) ) | X != c }}",
false,
"if Y != d then if p(b,a) then 1 else 0 else 0"),
// Variant of Example 1 from the pseudo-code (on X, Z) instead of (on X) and [p(X, Z)] instead of [p(X,a)]
new UseValuesForPreviousMessagesTestData(Expressions.TRUE.toString(),
new TrivialPQWithPArity2AndQArity1(),
"if Y != d then (previous message to [p(b,Y)] from [if p(b,Y) and q(Y) then 1 else 0]) else 0",
"{{ (on X, Z) ( [p(X,Z)], [if p(X,Z) and q(Z) then 1 else 0], ( if p(X,Z) then 1 else 0 ) ) | X != c }}",
false,
"if Y != d then if p(b,Y) then 1 else 0 else 0"),
};
perform(tests);
}
// Tests LPIUtil.pickSingleElement(intensionalSet): for an intensional set
// whose condition constrains it to exactly one element, returns that element
// with the index variables replaced by what the condition binds them to.
// The data exercises: direct equalities, free-variable preference over
// scoped variables, and constants taking precedence over variables.
@Test
public void testPickSingleElement() {
// Test-data holder: a single intensional set expression, parsed on demand.
class PickSingleElementTestData extends TestData {
private String intensionalSetString;
private Expression intensionalSet;
public PickSingleElementTestData(String contextualConstraint, Model model, String intensionalSet, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.intensionalSetString = intensionalSet;
};
@Override
public Expression getTopExpression() {
intensionalSet = parse(intensionalSetString);
return intensionalSet;
}
@Override
public Expression callRewrite(RewritingProcess process) {
Expression result = LPIUtil.pickSingleElement(intensionalSet, process);
return result;
}
}
TestData[] tests = new TestData[] {
// Basic:
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"{ ([p(a)], [if p(a) then 1 else 0], 1) | true }",
false,
"([p(a)], [if p(a) then 1 else 0], 1)"
),
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"{ (on X) ([p(X)], [if p(X) then 1 else 0], 1) | X = a}",
false,
"([p(a)], [if p(a) then 1 else 0], 1)"
),
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"{(on X, Y) ([p(X)], [if p(X) and q(Y) then 1 else 0], 1) | X = a and Y = b}",
false,
"([p(a)], [if p(a) and q(b) then 1 else 0], 1)"
),
// Expression is free of the index expressions so should
// be selected straight away
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialSickbob(),
"{ ( on X' in People ) (if sick(X) then 1 else 0) | X = X' }",
false,
"if sick(X) then 1 else 0"
),
// Ensure the free variable is selected
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialSickbob(),
"{ ( on X' in People ) (if sick(X') then 1 else 0) | X = X' }",
false,
"if sick(X) then 1 else 0"
),
// Ensure constants take precedence over variables.
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialSickbob(),
"{ ( on X' in People ) (if sick(X') then 1 else 0) | X = X' = person1 }",
false,
"if sick(person1) then 1 else 0"
),
// Ensure the free variable X is selected from the common set of disjuncts
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialSickbob(),
"{ ( on X' in People ) (if sick(X') then 1 else 0) | X = X' = person1 or X = X' = person2 or X = X' = person3 }",
false,
"if sick(X) then 1 else 0"
),
// Ensure the free Variable X is selected and not the scoped W variable
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialSickbob(),
"{ ( on W in People, X' in People ) (if sick(X') then 1 else 0) | W = X = X' = person1 or W = X = X' = person2 or W = X = X' = person3 }",
false,
"if sick(X) then 1 else 0"
),
// Ensure the free Variable X is selected and not the scoped W variable
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialSickbob(),
"{ ( on X' in People, W in People ) (if sick(X') then 1 else 0) | W = X = X' }",
false,
"if sick(X) then 1 else 0"
),
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"{ ( on X', Y ) (if p(X') or p(Y) then 1 else 0) | (X = X' or X = Y) and X' = Y }",
false,
"if p(X) or p(X) then 1 else 0"
),
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"{ ( on X', Y ) (if p(X') then 2 else 1) | (X = X' or X = Y) and X' != Y and X' = X }",
false,
"if p(X) then 2 else 1"
),
new PickSingleElementTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"{ ( on X', Y) (if p(Y) then 2 else 1) | (X = X' or X = Y) and X' != Y and X' != X }",
false,
"if p(X) then 2 else 1"
),
};
perform(tests);
}
// Tests LPIUtil.pickValue(X, I, C): given a variable X, a set of index
// variables I, and a constraint C, returns the value that C binds X to.
// Covers direct bindings (X = a), chained equalities with constants taking
// precedence (X = X' = person1 picks person1), selection of the free
// variable common to all disjuncts, and a case requiring "there exists"
// formula simplification.
@Test
public void testPickValue() {
// Test-data holder for one pickValue(...) call; the three inputs are kept
// as parseable strings and parsed on demand in getTopExpression().
class PickValueTestData extends TestData {
private final String variableString;
private final String indexSetString;
private final String constraintString;
private Expression exprVariable;
private Expression exprIndexSet;
private Expression exprConstraint;
// Parameter names X, I, C mirror the LPIUtil.pickValue(X, I, C) signature.
public PickValueTestData(String contextualConstraint, Model model, String X, String I, String C, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.variableString = X;
this.indexSetString = I;
this.constraintString = C;
}
@Override
public Expression getTopExpression() {
// Parse all three inputs and expose them as one tuple so the harness
// has a single top-level expression to work with.
this.exprVariable = parse(variableString);
this.exprIndexSet = parse(indexSetString);
this.exprConstraint = parse(constraintString);
return Tuple.make(Arrays.asList(exprVariable, exprIndexSet, exprConstraint));
}
@Override
public Expression callRewrite(RewritingProcess process) {
return LPIUtil.pickValue(exprVariable, exprIndexSet, exprConstraint, process);
}
}
TestData[] tests = new TestData[] {
// Basic:
new PickValueTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"X",
"{X}",
"X = a",
false,
"a"
),
new PickValueTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"X",
"{X, Y}",
"X = a and Y = b",
false,
"a"
),
new PickValueTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"Y",
"{X, Y}",
"X = a and Y = b",
false,
"b"
),
new PickValueTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"X'",
"{X'}",
"X = X'",
false,
"X"
),
// Constants take precedence over variables.
new PickValueTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"X'",
"{X'}",
"X = X' = person1",
false,
"person1"
),
// The free variable X is common to all disjuncts, so it is picked.
new PickValueTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"X'",
"{X'}",
"X = X' = person1 or X = X' = person2 or X = X' = person3",
false,
"X"
),
new PickValueTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"X'",
"{X', W}",
"W = X = X' = person1 or W = X = X' = person2 or W = X = X' = person3",
false,
"X"
),
new PickValueTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"X'",
"{X', W}",
"W = X = X'",
false,
"X"
),
// Basic: Requires There Exists formula simplification
new PickValueTestData(Expressions.TRUE.toString(),
new TrivialPQ(),
"Y",
"{X, Y, Z}",
"X = a and (Y = b or Y = c) and (Z = b => Y = c) and Z = b ",
false,
"c"
),
};
perform(tests);
}
// Tests R_belief on loopy models. Uses the SYNCHRONOUS belief propagation
// update schedule (per the inline note, currently the only schedule that
// works with a loopy model) and rounds the resulting belief to 9 digits of
// precision before comparing against the expected string.
@Test
public void testBeliefForLoopyModels() {
// Test-data holder: 'belief' is the belief([..]) query; 'globalObjects'
// optionally seeds the rewriting process's global object map.
class LoopyBeliefTestData extends TestData {
private String belief;
private Expression exprBelief;
private Map<Object, Object> globalObjects;
public LoopyBeliefTestData(String contextualConstraint, Model model, String belief, boolean illegalArgumentTest, String expected) {
this(contextualConstraint, model, belief, null, illegalArgumentTest, expected);
};
public LoopyBeliefTestData(String contextualConstraint, Model model, String belief, Map<Object, Object> globalObjects, boolean illegalArgumentTest, String expected) {
super(contextualConstraint, model, illegalArgumentTest, expected);
this.belief = belief;
this.globalObjects = globalObjects;
};
@Override
public Expression getTopExpression() {
this.exprBelief = parse(belief);
return this.exprBelief;
}
@Override
public Expression callRewrite(RewritingProcess process) {
if (globalObjects != null) {
process.getGlobalObjects().putAll(globalObjects);
}
LBPConfiguration configuration = LBPFactory.newLBPConfiguration();
// Currently only schedule that will work with a loopy model
configuration.setBeliefPropagationUpdateSchedule(LBPConfiguration.BeliefPropagationUpdateSchedule.SYNCHRONOUS);
RewritingProcess lbpProcess = LBPFactory.newLBPProcess(process.getRootExpression(), configuration, process);
Expression belief = lbpProcess.rewrite(LBPRewriter.R_belief, exprBelief);
// Round so small floating-point differences don't fail the comparison.
Expression roundedBelief = Expressions.roundToAGivenPrecision(belief, 9);
return roundedBelief;
}
};
TestData[] tests = new TestData[] {
// Basic:
//
new LoopyBeliefTestData(Expressions.TRUE.toString(),
new TrivialLoopyPQ(),
"belief([p(X)])",
false,
// Note: (LBP 10 iterations) - these values oscillate
// and this can be seen in grounded versions of
// this model (domain size 10).
// MRF - varelim
// True False
// p(obj1) 0.749878 0.250122
// q(obj1) 0.749878 0.250122
// MRF - libdaiBP
// True False
// p(obj1) 0.998927 0.001073
// q(obj1) 0.505314 0.494686
"if p(X) then 0.998927766 else 0.00107223386"
),
//
new LoopyBeliefTestData(Expressions.TRUE.toString(),
new TrivialLoopyPQWithPriors(),
"belief([p(X)])",
false,
// Note: (LBP 10 iterations) - these values are equivalent
// to the values returned from running variable eliminations (i.e. exact)
// on a grounded version of this model (domain size 10).
// MRF - varelim
// True False
// p(obj1) 0.996017 0.003983
// q(obj1) 0.997635 0.002365
"if p(X) then 0.996016637 else 0.00398336255"
),
//
new LoopyBeliefTestData(Expressions.TRUE.toString(),
new TrivialLoopyParfactorsExample(),
"belief([m(X)])",
false,
// Note: (LBP 10 iterations)
// MRF - varelim
// True False
// g(obj1) 0.246396 0.753604
// m(obj1) 0.246396 0.753604
// MRF - libdaiBP
// True False
// g(obj1) 0.225974 0.774026
// m(obj1) 0.225974 0.774026
// Appears not to be converging.
"if m(X) then 0.103394195 else 0.896605805"
),
};
perform(tests);
}
// Integration test for LBPQueryEngine: runs three belief queries
// concurrently on their own threads and checks the results.
// - Query 1: belief([rich(X)]) on TrivialGaveTreasureToOwnsRich.
// - Query 2: same model, but with QueryOptions(false, true, true) so the
// domain size is unknown and a symbolic formula is expected back.
// - Query 3: belief([rich(bob)]) with an evidence parfactor asserting
// rich(bob) with certainty; the result reflects the evidence back.
// Trace and justification listener output is accumulated per query UUID and
// printed for manual inspection.
@Test
public void testLBPQueryEngine() {
final LBPQueryEngine queryEngine = LBPFactory.newLBPQueryEngine();
final String queryUUID1 = queryEngine.newQueryUUID();
// Note: for the second query I'm changing the query options so that the
// domain size is not known so should get a formula back with this query.
final String queryUUID2 = queryEngine.newQueryUUID(new LBPQueryEngine.QueryOptions(false, true, true));
final String queryUUID3 = queryEngine.newQueryUUID();
// Per-query buffers for trace and justification listener output.
final Map<String, StringBuilder> queryTraceOutput = new HashMap<String, StringBuilder>();
queryTraceOutput.put(queryUUID1, new StringBuilder());
queryTraceOutput.put(queryUUID2, new StringBuilder());
queryTraceOutput.put(queryUUID3, new StringBuilder());
final Map<String, StringBuilder> queryJustificationOutput = new HashMap<String, StringBuilder>();
queryJustificationOutput.put(queryUUID1, new StringBuilder());
queryJustificationOutput.put(queryUUID2, new StringBuilder());
queryJustificationOutput.put(queryUUID3, new StringBuilder());
// Log query step timings to stdout as the queries progress.
queryEngine.addQueryStepListener(new LBPQueryEngine.QueryStepListener() {
@Override
public void queryStepStarting(String queryUUID, String description) {
System.out.println("Starting: "+description+ " " + queryUUID);
}
@Override
public void queryStepComplete(String queryUUID, QueryStep completedStep) {
System.out.println("Finished: " + completedStep.getDescription() + " in " + completedStep.getTimeInNanoseconds() + " nanoseconds " + queryUUID);
for (QueryStep subStep : completedStep.getSubSteps()) {
System.out.println(" "+subStep.getDescription() + " in " + subStep.getTimeInNanoseconds() + " nanoseconds "+queryUUID);
}
}
});
// Capture trace events, indented by trace level, into the query's buffer.
LBPQueryEngine.TraceListener traceListener = new LBPQueryEngine.TraceListener() {
@Override
public void traceEvent(String queryUUID, int traceLevel, Long profileInfo, Marker marker,
String formattedMsg, Object... args) {
StringBuilder sb = queryTraceOutput.get(queryUUID);
sb.append("|");
sb.append(indent(traceLevel));
sb.append(formattedMsg);
sb.append("\n");
}
};
queryEngine.addTraceListener(traceListener);
// Capture justification events similarly; when the message is empty and a
// single argument is supplied, the argument itself is appended instead.
LBPQueryEngine.JustificationListener justificationListener = new LBPQueryEngine.JustificationListener() {
@Override
public void justificationEvent(String queryUUID, int justificationLevel,
Marker marker, String formattedMsg, Object... args) {
StringBuilder sb = queryJustificationOutput.get(queryUUID);
sb.append("|");
sb.append(indent(justificationLevel));
sb.append(formattedMsg);
if ("".equals(formattedMsg) && args != null && args.length == 1) {
sb.append(args[0]);
}
sb.append("\n");
}
};
queryEngine.addJustificationListener(justificationListener);
final Map<String, String> results = new HashMap<String, String>();
Runnable callQuery1 = new Runnable() {
@Override
public void run() {
String result = queryEngine.queryBeliefOfRandomVariable(queryUUID1,
"belief([rich(X)])",
(new TrivialGaveTreasureToOwnsRich()).getModelDeclaration());
results.put(queryUUID1, result);
}
};
Runnable callQuery2 = new Runnable() {
@Override
public void run() {
String result = queryEngine.queryBeliefOfRandomVariable(queryUUID2,
"belief([rich(bob)])",
(new TrivialGaveTreasureToOwnsRich()).getModelDeclaration());
results.put(queryUUID2, result);
}
};
Runnable callQuery3 = new Runnable() {
@Override
public void run() {
String result = queryEngine.queryBeliefOfRandomVariable(queryUUID3,
"belief([rich(bob)])",
(new TrivialGaveTreasureToOwnsRich()).getModelDeclaration(),
// Here I have evidence that bob has 100% probability of being rich.
"parfactors({[if rich(bob) then 1 else 0]})");
results.put(queryUUID3, result);
}
};
Thread callQuery1Thread = new Thread(callQuery1);
Thread callQuery2Thread = new Thread(callQuery2);
Thread callQuery3Thread = new Thread(callQuery3);
// Note: The tree util UI is not multi-threaded so want to ensure is turned off
GrinderConfiguration.setProperty(GrinderConfiguration.KEY_DISPLAY_TREE_UTIL_UI, "false");
// Each worker thread must inherit the current thread's configuration.
GrinderConfiguration.inheritConfiguration(Thread.currentThread(), callQuery1Thread);
GrinderConfiguration.inheritConfiguration(Thread.currentThread(), callQuery2Thread);
GrinderConfiguration.inheritConfiguration(Thread.currentThread(), callQuery3Thread);
try {
callQuery1Thread.start();
callQuery2Thread.start();
callQuery3Thread.start();
callQuery1Thread.join();
callQuery2Thread.join();
callQuery3Thread.join();
} catch (InterruptedException ie) {
Assert.fail();
}
queryEngine.removeTraceListener(traceListener);
queryEngine.removeJustificationListener(justificationListener);
System.out.println("QUERY 1 - TRACE");
System.out.println(queryTraceOutput.get(queryUUID1).toString());
System.out.println("QUERY 1 - JUSTIFICATION");
System.out.println(queryJustificationOutput.get(queryUUID1).toString());
System.out.println("QUERY 2 - TRACE");
System.out.println(queryTraceOutput.get(queryUUID2).toString());
System.out.println("QUERY 2 - JUSTIFICATION");
System.out.println(queryJustificationOutput.get(queryUUID2).toString());
System.out.println("QUERY 3 (with Evidence) - TRACE");
System.out.println(queryTraceOutput.get(queryUUID3).toString());
System.out.println("QUERY 3 (with Evidence) - JUSTIFICATION");
System.out.println(queryJustificationOutput.get(queryUUID3).toString());
// Note: old R_basic result
// Assert.assertEquals("if rich(X) then (1 + 2 ^ | People |) ^ | Treasure | / ((1 + 2 ^ | People |) ^ | Treasure | + 1) else 1 / ((1 + 2 ^ | People |) ^ | Treasure | + 1)", results.get(queryUUID1));
Assert.assertEquals("if rich(X) then 1 else 0.000000000000000000000000000000781198", results.get(queryUUID1));
// Note: old R_basic result
// Assert.assertEquals("if rich(bob) then (1 + 2 ^ | People |) ^ | Treasure | / ((1 + 2 ^ | People |) ^ | Treasure | + 1) else 1 / ((1 + 2 ^ | People |) ^ | Treasure | + 1)", results.get(queryUUID2));
Assert.assertEquals("if rich(bob) then (1 + 2 ^ | type(X) |) ^ | type(Z) | / ((1 + 2 ^ | type(X) |) ^ | type(Z) | + 1) else 1 / ((1 + 2 ^ | type(X) |) ^ | type(Z) | + 1)", results.get(queryUUID2));
// Note: This is essentially the evidence reflected back.
Assert.assertEquals("if rich(bob) then 1 else 0", results.get(queryUUID3));
}
// Builds an indentation string of 'level' single spaces, used to indent the
// trace/justification listener output by nesting level.
private String indent(int level) {
final StringBuilder padding = new StringBuilder(Math.max(level, 0));
while (padding.length() < level) {
padding.append(' ');
}
return padding.toString();
}
}
|
package com.horcrux.svg;
import android.annotation.SuppressLint;
import android.content.res.AssetManager;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PathMeasure;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.os.Build;
import android.view.View;
import android.view.ViewParent;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.facebook.react.views.text.ReactFontManager;
import java.util.ArrayList;
import javax.annotation.Nullable;
import static android.graphics.Matrix.MTRANS_X;
import static android.graphics.Matrix.MTRANS_Y;
import static android.graphics.PathMeasure.POSITION_MATRIX_FLAG;
import static android.graphics.PathMeasure.TANGENT_MATRIX_FLAG;
import static com.horcrux.svg.TextProperties.*;
@SuppressLint("ViewConstructor")
class TSpanView extends TextView {
// Full turn in radians, and the radians-to-degrees conversion factor (360 / tau).
private static final double tau = 2 * Math.PI;
private static final double radToDeg = 360 / tau;
// Font asset lookup: folder prefix and the supported font file extensions.
private static final String FONTS = "fonts/";
private static final String OTF = ".otf";
private static final String TTF = ".ttf";
// Memoized result of getPath(); cleared to null by invalidate().
private Path mCachedPath;
// The literal text of this tspan; null when this node only groups children.
@Nullable String mContent;
// Enclosing text path, if any — presumably assigned by setupTextPath(); confirm.
private TextPathView textPath;
// Emoji glyphs drawn separately from the main text run, with a parallel list
// of the canvas transform to apply for each glyph (see draw()).
ArrayList<String> emoji = new ArrayList<>();
ArrayList<Matrix> emojiTransforms = new ArrayList<>();
public TSpanView(ReactContext reactContext) {
super(reactContext);
}
// React prop setter for the tspan's text content; invalidates the view so
// the cached path is rebuilt and the text re-rendered.
@ReactProp(name = "content")
public void setContent(@Nullable String content) {
mContent = content;
invalidate();
}
// Drops the cached path before delegating to the normal invalidation, so the
// next getPath() call recomputes it from the current content/layout.
@Override
public void invalidate() {
mCachedPath = null;
super.invalidate();
}
// Draws this tspan: with content, paints any emoji glyphs (each under its own
// recorded transform) and then the text path; without content, acts as a
// container by clipping and drawing children.
@Override
void draw(Canvas canvas, Paint paint, float opacity) {
if (mContent == null) {
// Container node: clip and render the child group.
clip(canvas, paint);
drawGroup(canvas, paint, opacity);
return;
}
final int emojiCount = emoji.size();
if (emojiCount > 0) {
FontData font = getTextRootGlyphContext().getFont();
applyTextPropertiesToPaint(paint, font);
for (int index = 0; index < emojiCount; index++) {
Matrix transform = emojiTransforms.get(index);
canvas.save();
canvas.concat(transform);
canvas.drawText(emoji.get(index), 0, 0, paint);
canvas.restore();
}
}
drawPath(canvas, paint, opacity);
}
// Returns this tspan's path, memoized in mCachedPath (cleared by
// invalidate()). A content-less node yields the children's group path;
// otherwise the line path is built inside a pushed glyph context.
@Override
Path getPath(Canvas canvas, Paint paint) {
if (mCachedPath == null) {
if (mContent == null) {
mCachedPath = getGroupPath(canvas, paint);
} else {
setupTextPath();
pushGlyphContext();
mCachedPath = getLinePath(mContent, paint, canvas);
popGlyphContext();
}
}
return mCachedPath;
}
// Returns the total horizontal advance (measured width) of this subtree's
// text: for a content-less node, the sum over all TextView children;
// otherwise the measured width of mContent with font features and letter
// spacing applied. The result is memoized in 'cachedAdvance' (a field
// declared outside this view of the file; NaN presumably means "not yet
// computed" — confirm).
double getSubtreeTextChunksTotalAdvance(Paint paint) {
if (!Double.isNaN(cachedAdvance)) {
return cachedAdvance;
}
double advance = 0;
if (mContent == null) {
// Container node: accumulate the advances of all text children.
for (int i = 0; i < getChildCount(); i++) {
View child = getChildAt(i);
if (child instanceof TextView) {
TextView text = (TextView)child;
advance += text.getSubtreeTextChunksTotalAdvance(paint);
}
}
cachedAdvance = advance;
return advance;
}
String line = mContent;
final int length = line.length();
if (length == 0) {
cachedAdvance = 0;
return advance;
}
GlyphContext gc = getTextRootGlyphContext();
FontData font = gc.getFont();
applyTextPropertiesToPaint(paint, font);
double letterSpacing = font.letterSpacing;
// Optional ligatures are only allowed when no letter spacing is applied and
// the font-variant-ligatures setting is 'normal'.
final boolean allowOptionalLigatures = letterSpacing == 0 &&
font.fontVariantLigatures == FontVariantLigatures.normal;
// setFontFeatureSettings/setLetterSpacing require API 21 (Lollipop).
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
String required = "'rlig', 'liga', 'clig', 'calt', 'locl', 'ccmp', 'mark', 'mkmk',";
String defaultFeatures = required + "'kern', ";
if (allowOptionalLigatures) {
String additionalLigatures = "'hlig', 'cala', ";
paint.setFontFeatureSettings(defaultFeatures + additionalLigatures + font.fontFeatureSettings);
} else {
String disableDiscretionaryLigatures = "'liga' 0, 'clig' 0, 'dlig' 0, 'hlig' 0, 'cala' 0, ";
paint.setFontFeatureSettings(defaultFeatures + disableDiscretionaryLigatures + font.fontFeatureSettings);
}
// Paint.setLetterSpacing takes em units; mScale is presumably the display
// density scale — confirm against the base class.
paint.setLetterSpacing((float)(letterSpacing / (font.fontSize * mScale)));
}
cachedAdvance = paint.measureText(line);
return cachedAdvance;
}
@SuppressWarnings("ConstantConditions")
private Path getLinePath(String line, Paint paint, Canvas canvas) {
final int length = line.length();
final Path path = new Path();
if (length == 0) {
return path;
}
double pathLength = 0;
PathMeasure pm = null;
boolean isClosed = false;
final boolean hasTextPath = textPath != null;
if (hasTextPath) {
pm = new PathMeasure(textPath.getTextPath(canvas, paint), false);
pathLength = pm.getLength();
isClosed = pm.isClosed();
if (pathLength == 0) {
return path;
}
}
GlyphContext gc = getTextRootGlyphContext();
FontData font = gc.getFont();
applyTextPropertiesToPaint(paint, font);
GlyphPathBag bag = new GlyphPathBag(paint);
boolean[] ligature = new boolean[length];
final char[] chars = line.toCharArray();
double kerning = font.kerning;
double wordSpacing = font.wordSpacing;
double letterSpacing = font.letterSpacing;
final boolean autoKerning = !font.manualKerning;
final boolean allowOptionalLigatures = letterSpacing == 0 &&
font.fontVariantLigatures == FontVariantLigatures.normal;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
String required = "'rlig', 'liga', 'clig', 'calt', 'locl', 'ccmp', 'mark', 'mkmk',";
String defaultFeatures = required + "'kern', ";
if (allowOptionalLigatures) {
String additionalLigatures = "'hlig', 'cala', ";
paint.setFontFeatureSettings(defaultFeatures + additionalLigatures + font.fontFeatureSettings);
} else {
String disableDiscretionaryLigatures = "'liga' 0, 'clig' 0, 'dlig' 0, 'hlig' 0, 'cala' 0, ";
paint.setFontFeatureSettings(defaultFeatures + disableDiscretionaryLigatures + font.fontFeatureSettings);
}
}
// OpenType.js font data
ReadableMap fontData = font.fontData;
float[] advances = new float[length];
paint.getTextWidths(line, advances);
/*
This would give both advances and textMeasure in one call / looping over the text
double textMeasure = paint.getTextRunAdvances(line, 0, length, 0, length, true, advances, 0);
*/
final TextAnchor textAnchor = font.textAnchor;
TextView anchorRoot = getTextAnchorRoot();
final double textMeasure = anchorRoot.getSubtreeTextChunksTotalAdvance(paint);
double offset = getTextAnchorOffset(textAnchor, textMeasure);
applyTextPropertiesToPaint(paint, font);
int side = 1;
double startOfRendering = 0;
double endOfRendering = pathLength;
final double fontSize = gc.getFontSize();
boolean sharpMidLine = false;
if (hasTextPath) {
sharpMidLine = textPath.getMidLine() == TextPathMidLine.sharp;
/*
Name
side
Value
left | right
initial value
left
Animatable
yes
Determines the side of the path the text is placed on
(relative to the path direction).
Specifying a value of right effectively reverses the path.
Added in SVG 2 to allow text either inside or outside closed subpaths
and basic shapes (e.g. rectangles, circles, and ellipses).
Adding 'side' was resolved at the Sydney (2015) meeting.
*/
side = textPath.getSide() == TextPathSide.right ? -1 : 1;
final double absoluteStartOffset = getAbsoluteStartOffset(textPath.getStartOffset(), pathLength, fontSize);
offset += absoluteStartOffset;
if (isClosed) {
final double halfPathDistance = pathLength / 2;
startOfRendering = absoluteStartOffset + (textAnchor == TextAnchor.middle ? -halfPathDistance : 0);
endOfRendering = startOfRendering + pathLength;
}
}
double scaleSpacingAndGlyphs = 1;
if (mTextLength != null) {
final double author = PropHelper.fromRelative(mTextLength, canvas.getWidth(), 0, mScale, fontSize);
if (author < 0) {
throw new IllegalArgumentException("Negative textLength value");
}
switch (mLengthAdjust) {
default:
case spacing:
letterSpacing += (author - textMeasure) / (length - 1);
break;
case spacingAndGlyphs:
scaleSpacingAndGlyphs = author / textMeasure;
break;
}
}
final double scaledDirection = scaleSpacingAndGlyphs * side;
final Paint.FontMetrics fm = paint.getFontMetrics();
final double descenderDepth = fm.descent;
final double bottom = descenderDepth + fm.leading;
final double ascenderHeight = -fm.ascent + fm.leading;
final double top = -fm.top;
final double totalHeight = top + bottom;
double baselineShift = 0;
String baselineShiftString = getBaselineShift();
AlignmentBaseline baseline = getAlignmentBaseline();
if (baseline != null) {
// TODO alignment-baseline, test / verify behavior
// TODO get per glyph baselines from font baseline table, for high-precision alignment
switch (baseline) {
default:
case baseline:
// Use the dominant baseline choice of the parent.
baselineShift = 0;
break;
case textBottom:
case afterEdge:
case textAfterEdge:
// text-after-edge = text-bottom
// text-after-edge = descender depth
baselineShift = -descenderDepth;
break;
case alphabetic:
// alphabetic = 0
baselineShift = 0;
break;
case ideographic:
// ideographic = descender depth
baselineShift = -descenderDepth;
break;
case middle:
// Align the vertical midpoint of the box with the baseline of the parent box plus half the x-height of the parent.
// middle = x height / 2
Rect bounds = new Rect();
// this will just retrieve the bounding rect for 'x'
paint.getTextBounds("x", 0, 1, bounds);
int xHeight = bounds.height();
baselineShift = xHeight / 2.0;
break;
case central:
// central = (ascender height - descender depth) / 2
baselineShift = (ascenderHeight - descenderDepth) / 2;
break;
case mathematical:
// Hanging and mathematical baselines
// There are no obvious formulas to calculate the position of these baselines.
// At the time of writing FOP puts the hanging baseline at 80% of the ascender
// height and the mathematical baseline at 50%.
baselineShift = 0.5 * ascenderHeight;
break;
case hanging:
baselineShift = 0.8 * ascenderHeight;
break;
case textTop:
case beforeEdge:
case textBeforeEdge:
// text-before-edge = text-top
// text-before-edge = ascender height
baselineShift = ascenderHeight;
break;
case bottom:
// Align the top of the aligned subtree with the top of the line box.
baselineShift = bottom;
break;
case center:
// Align the center of the aligned subtree with the center of the line box.
baselineShift = totalHeight / 2;
break;
case top:
// Align the bottom of the aligned subtree with the bottom of the line box.
baselineShift = top;
break;
}
}
if (baselineShiftString != null && !baselineShiftString.isEmpty()) {
switch (baseline) {
case top:
case bottom:
break;
default:
switch (baselineShiftString) {
case "sub":
// TODO
if (fontData != null && fontData.hasKey("tables") && fontData.hasKey("unitsPerEm")) {
int unitsPerEm = fontData.getInt("unitsPerEm");
ReadableMap tables = fontData.getMap("tables");
if (tables.hasKey("os2")) {
ReadableMap os2 = tables.getMap("os2");
if (os2.hasKey("ySubscriptYOffset")) {
double subOffset = os2.getDouble("ySubscriptYOffset");
baselineShift += mScale * fontSize * subOffset / unitsPerEm;
}
}
}
break;
case "super":
// TODO
if (fontData != null && fontData.hasKey("tables") && fontData.hasKey("unitsPerEm")) {
int unitsPerEm = fontData.getInt("unitsPerEm");
ReadableMap tables = fontData.getMap("tables");
if (tables.hasKey("os2")) {
ReadableMap os2 = tables.getMap("os2");
if (os2.hasKey("ySuperscriptYOffset")) {
double superOffset = os2.getDouble("ySuperscriptYOffset");
baselineShift -= mScale * fontSize * superOffset / unitsPerEm;
}
}
}
break;
case "baseline":
break;
default:
baselineShift -= PropHelper.fromRelative(baselineShiftString, mScale * fontSize, 0, mScale, fontSize);
}
break;
}
}
final Matrix start = new Matrix();
final Matrix mid = new Matrix();
final Matrix end = new Matrix();
final float[] startPointMatrixData = new float[9];
final float[] endPointMatrixData = new float[9];
emoji.clear();
emojiTransforms.clear();
for (int index = 0; index < length; index++) {
char currentChar = chars[index];
String current = String.valueOf(currentChar);
boolean alreadyRenderedGraphemeCluster = ligature[index];
/*
Determine the glyph's charwidth (i.e., the amount which the current text position
advances horizontally when the glyph is drawn using horizontal text layout).
*/
boolean hasLigature = false;
if (alreadyRenderedGraphemeCluster) {
current = "";
} else {
int nextIndex = index;
while (++nextIndex < length) {
float nextWidth = advances[nextIndex];
if (nextWidth > 0) {
break;
}
String nextLigature = current + String.valueOf(chars[nextIndex]);
ligature[nextIndex] = true;
current = nextLigature;
hasLigature = true;
}
}
double charWidth = paint.measureText(current) * scaleSpacingAndGlyphs;
if (autoKerning) {
double kerned = advances[index] * scaleSpacingAndGlyphs;
kerning = kerned - charWidth;
}
boolean isWordSeparator = currentChar == ' ';
double wordSpace = isWordSeparator ? wordSpacing : 0;
double spacing = wordSpace + letterSpacing;
double advance = charWidth + spacing;
double x = gc.nextX(alreadyRenderedGraphemeCluster ? 0 : kerning + advance);
double y = gc.nextY();
double dx = gc.nextDeltaX();
double dy = gc.nextDeltaY();
double r = gc.nextRotation();
if (alreadyRenderedGraphemeCluster || isWordSeparator) {
// Skip rendering other grapheme clusters of ligatures (already rendered),
// But, make sure to increment index positions by making gc.next() calls.
continue;
}
advance *= side;
charWidth *= side;
double cursor = offset + (x + dx) * side;
double startPoint = cursor - advance;
if (hasTextPath) {
/*
Determine the point on the curve which is charwidth distance along the path from
the startpoint-on-the-path for this glyph, calculated using the user agent's
distance along the path algorithm. This point is the endpoint-on-the-path for
the glyph.
*/
double endPoint = startPoint + charWidth;
/*
Determine the midpoint-on-the-path, which is the point on the path which is
"halfway" (user agents can choose either a distance calculation or a parametric
calculation) between the startpoint-on-the-path and the endpoint-on-the-path.
*/
double halfWay = charWidth / 2;
double midPoint = startPoint + halfWay;
// Glyphs whose midpoint-on-the-path are off the path are not rendered.
if (midPoint > endOfRendering) {
continue;
} else if (midPoint < startOfRendering) {
continue;
}
final int posAndTanFlags = POSITION_MATRIX_FLAG | TANGENT_MATRIX_FLAG;
if (sharpMidLine) {
pm.getMatrix((float) midPoint, mid, posAndTanFlags);
} else {
if (startPoint < 0) {
pm.getMatrix(0, start, posAndTanFlags);
start.preTranslate((float) startPoint, 0);
} else {
pm.getMatrix((float) startPoint, start, POSITION_MATRIX_FLAG);
}
pm.getMatrix((float) midPoint, mid, POSITION_MATRIX_FLAG);
if (endPoint > pathLength) {
pm.getMatrix((float) pathLength, end, posAndTanFlags);
end.preTranslate((float) (endPoint - pathLength), 0);
} else {
pm.getMatrix((float) endPoint, end, POSITION_MATRIX_FLAG);
}
start.getValues(startPointMatrixData);
end.getValues(endPointMatrixData);
double startX = startPointMatrixData[MTRANS_X];
double startY = startPointMatrixData[MTRANS_Y];
double endX = endPointMatrixData[MTRANS_X];
double endY = endPointMatrixData[MTRANS_Y];
// line through the startpoint-on-the-path and the endpoint-on-the-path
double lineX = endX - startX;
double lineY = endY - startY;
double glyphMidlineAngle = Math.atan2(lineY, lineX);
mid.preRotate((float) (glyphMidlineAngle * radToDeg * side));
}
mid.preTranslate((float) -halfWay, (float) (dy + baselineShift));
mid.preScale((float) scaledDirection, (float) side);
mid.postTranslate(0, (float) y);
} else {
mid.setTranslate((float) startPoint, (float) (y + dy + baselineShift));
}
mid.preRotate((float) r);
Path glyph;
if (hasLigature) {
glyph = new Path();
paint.getTextPath(current, 0, current.length(), 0, 0, glyph);
} else {
glyph = bag.getOrCreateAndCache(currentChar, current);
}
RectF bounds = new RectF();
glyph.computeBounds(bounds, true);
float width = bounds.width();
if (width == 0) { // Render unicode emoji
canvas.save();
canvas.concat(mid);
emoji.add(current);
emojiTransforms.add(new Matrix(mid));
canvas.drawText(current, 0, 0, paint);
canvas.restore();
} else {
glyph.transform(mid);
path.addPath(glyph);
}
}
return path;
}
/**
 * Converts the textPath {@code startOffset} (which may be relative, e.g. a
 * percentage or em-based value) into an absolute distance along the path.
 *
 * @param startOffset the offset as specified on the element
 * @param distance    the reference length relative values are resolved against
 *                    (the total path length at the call site)
 * @param fontSize    current font size, used for font-relative units
 * @return the resolved offset in user-space units
 */
private double getAbsoluteStartOffset(SVGLength startOffset, double distance, double fontSize) {
    return PropHelper.fromRelative(startOffset, distance, 0, mScale, fontSize);
}
/**
 * Returns the horizontal shift implied by the text-anchor property, relative
 * to the measured width of the text run.
 *
 * @param textAnchor  anchor mode (start / middle / end)
 * @param textMeasure measured width of the rendered text
 * @return 0 for start, -width/2 for middle, -width for end
 */
private double getTextAnchorOffset(TextAnchor textAnchor, double textMeasure) {
    if (textAnchor == TextAnchor.middle) {
        return -textMeasure / 2;
    }
    if (textAnchor == TextAnchor.end) {
        return -textMeasure;
    }
    // start (and any other value) anchors at the glyph origin.
    return 0;
}
/**
 * Configures {@code paint} for text rendering from the given font data:
 * typeface (family plus bold/italic style), size and left alignment.
 *
 * Typeface resolution order: bundled asset "fonts/&lt;family&gt;.otf", then
 * "fonts/&lt;family&gt;.ttf", then the React Native font manager. Each failed
 * attempt falls through to the next source; if every source fails, the
 * typeface stays null and the platform default is used.
 */
private void applyTextPropertiesToPaint(Paint paint, FontData font) {
    AssetManager assetManager = mContext.getResources().getAssets();
    double fontSize = font.fontSize * mScale;
    boolean bold = font.fontWeight == FontWeight.Bold;
    boolean italic = font.fontStyle == FontStyle.italic;
    int fontStyle = bold
            ? (italic ? Typeface.BOLD_ITALIC : Typeface.BOLD)
            : (italic ? Typeface.ITALIC : Typeface.NORMAL);
    final String fontFamily = font.fontFamily;
    Typeface typeface = null;
    try {
        typeface = Typeface.createFromAsset(assetManager, FONTS + fontFamily + OTF);
    } catch (Exception ignored) {
        // No .otf asset for this family; try the next source.
    }
    if (typeface == null) {
        try {
            typeface = Typeface.createFromAsset(assetManager, FONTS + fontFamily + TTF);
        } catch (Exception ignored) {
            // No .ttf asset either; fall back to the font manager.
        }
    }
    if (typeface == null) {
        try {
            typeface = ReactFontManager.getInstance().getTypeface(fontFamily, fontStyle, assetManager);
        } catch (Exception ignored) {
            // Unknown / unsupported family: leave null so the default typeface is used.
        }
    }
    // NB: if the font family is null / unsupported, the default one will be used
    paint.setTypeface(typeface);
    paint.setTextSize((float) fontSize);
    paint.setTextAlign(Paint.Align.LEFT);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        // Reset platform letter spacing; spacing is applied manually per glyph.
        paint.setLetterSpacing(0);
    }
    // text-decoration (underline / line-through) is intentionally not applied
    // via paint flags; setUnderlineText/setStrikeThruText had no visible effect.
}
/**
 * Walks up the view hierarchy looking for an enclosing {@code TextPathView}
 * and stores it in {@code textPath}. The walk stops at the first ancestor
 * that is not a text-related view, so a text path outside the current text
 * subtree is never picked up.
 */
private void setupTextPath() {
    for (ViewParent parent = getParent(); parent != null; parent = parent.getParent()) {
        if (parent.getClass() == TextPathView.class) {
            textPath = (TextPathView) parent;
            return;
        }
        if (!(parent instanceof TextView)) {
            // Left the text subtree without finding a TextPathView.
            return;
        }
    }
}
@Override
int hitTest(final float[] src) {
    // Hit-test entry point: returns this view's id when the point (given in
    // the parent's coordinate space) lands on this element's fill, stroke, or
    // clipped area, and -1 otherwise.
    if (mContent == null) {
        // Container element without its own content: delegate to the base
        // implementation, which tests the children.
        return super.hitTest(src);
    }
    if (mPath == null || !mInvertible || !mTransformInvertible) {
        // Nothing has been rendered yet, or the matrices cannot be inverted.
        return -1;
    }
    // Map the point into this element's local coordinate space.
    float[] dst = new float[2];
    mInvMatrix.mapPoints(dst, src);
    mInvTransform.mapPoints(dst);
    int x = Math.round(dst[0]);
    int y = Math.round(dst[1]);
    // Lazily build and cache the containment regions: prefer the fill path,
    // fall back to the raw path; the stroke region is tracked separately.
    if (mRegion == null && mFillPath != null) {
        mRegion = getRegion(mFillPath);
    }
    if (mRegion == null && mPath != null) {
        mRegion = getRegion(mPath);
    }
    if (mStrokeRegion == null && mStrokePath != null) {
        mStrokeRegion = getRegion(mStrokePath);
    }
    if (
        (mRegion == null || !mRegion.contains(x, y)) &&
        (mStrokeRegion == null || !mStrokeRegion.contains(x, y))
    ) {
        return -1;
    }
    // A clip path further restricts hits. The clip region is cached per path
    // object and rebuilt only when the clip path instance changes.
    Path clipPath = getClipPath();
    if (clipPath != null) {
        if (mClipRegionPath != clipPath) {
            mClipRegionPath = clipPath;
            mClipRegion = getRegion(clipPath);
        }
        // NOTE(review): assumes getRegion(clipPath) never returns null here —
        // confirm, otherwise this dereference can NPE.
        if (!mClipRegion.contains(x, y)) {
            return -1;
        }
    }
    return getId();
}
}
|
package com.wizzardo.epoll;
import com.wizzardo.epoll.readable.ReadableBuilder;
import com.wizzardo.epoll.readable.ReadableByteBuffer;
import com.wizzardo.epoll.readable.ReadableData;
import com.wizzardo.epoll.threadpool.ThreadPool;
import com.wizzardo.tools.http.HttpClient;
import com.wizzardo.tools.misc.Stopwatch;
import com.wizzardo.tools.security.MD5;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.*;
import java.util.Enumeration;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Integration tests for {@link EpollServer}: start/stop lifecycle, echo
 * round-trips, builder-based writes, host binding, many concurrent
 * connections, large chunked writes and write-event notifications.
 *
 * NOTE: these tests need the native epoll bindings and real sockets, so they
 * only run on Linux.
 *
 * Fix applied: the original tests used bare Java {@code assert} statements
 * ({@code assert e == null}, {@code assert i == n}), which are silently
 * disabled unless the JVM runs with {@code -ea} — a failing test could pass.
 * These are now explicit JUnit assertions.
 */
public class EpollServerTest {

    /** Server accepts connections after start() and refuses them after stopEpoll(). */
    @Test
    public void startStopTest() throws InterruptedException {
        int port = 9091;
        EpollServer server = new EpollServer(port);
        server.start();
        Thread.sleep(500);
        server.stopEpoll();
        Thread.sleep(510);
        String connectionRefuse = null;
        try {
            new Socket("localhost", port);
        } catch (IOException e) {
            connectionRefuse = e.getMessage();
        }
        Assert.assertEquals("Connection refused", connectionRefuse);
    }

    /** Single request/response round-trip through an echoing IO thread. */
    @Test
    public void echoTest() throws InterruptedException {
        int port = 9090;
        EpollServer server = new EpollServer(port) {
            @Override
            protected IOThread createIOThread(int number, int divider) {
                return new IOThread(number, divider) {
                    @Override
                    public void onRead(Connection connection) {
                        try {
                            byte[] b = new byte[1024];
                            int r = connection.read(b, 0, b.length, this);
                            connection.write(b, 0, r, this);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                };
            }
        };
        server.start();
        try {
            Socket s = new Socket("localhost", port);
            OutputStream out = s.getOutputStream();
            out.write("hello world!".getBytes());
            InputStream in = s.getInputStream();
            byte[] b = new byte[1024];
            int r = in.read(b);
            Assert.assertEquals("hello world!", new String(b, 0, r));
        } catch (IOException e) {
            e.printStackTrace();
            // Was `assert e == null` — a no-op without -ea. Fail explicitly.
            Assert.fail("unexpected IOException: " + e.getMessage());
        }
        server.stopEpoll();
    }

    /** Two ReadableByteBuffer parts appended via ReadableBuilder arrive as one message. */
    @Test
    public void builderTest() throws InterruptedException {
        int port = 9094;
        final ReadableData partOne = new ReadableByteBuffer(new ByteBufferWrapper("Hello ".getBytes()));
        final ReadableData partTwo = new ReadableByteBuffer(new ByteBufferWrapper("world!".getBytes()));
        EpollServer server = new EpollServer(port) {
            @Override
            protected IOThread createIOThread(int number, int divider) {
                return new IOThread(number, divider) {
                    @Override
                    public void onConnect(Connection connection) {
                        connection.write(new ReadableBuilder().append(partOne).append(partTwo), this);
                    }
                };
            }
        };
        server.start();
        try {
            Socket s = new Socket("localhost", port);
            InputStream in = s.getInputStream();
            byte[] b = new byte[1024];
            int r = in.read(b);
            Assert.assertEquals("Hello world!", new String(b, 0, r));
        } catch (IOException e) {
            e.printStackTrace();
            // Was `assert e == null` — a no-op without -ea. Fail explicitly.
            Assert.fail("unexpected IOException: " + e.getMessage());
        }
        server.stopEpoll();
    }

    /** Connection carrying a small per-connection accumulation buffer. */
    static class BufferedConnection extends Connection {
        final byte[] buffer = new byte[128];
        volatile int count;

        public BufferedConnection(int fd, int ip, int port) {
            super(fd, ip, port);
        }
    }

    // Manual benchmark, intentionally not annotated with @Test: it serves a
    // canned HTTP response for 25 minutes so external tools (e.g. wrk) can
    // load-test the server.
    // @Test
    public void httpTest() throws InterruptedException {
        int port = 8084;
        final int poolSize = 2;
        final ThreadPool pool = new ThreadPool(poolSize);
        // Each worker thread gets its own reusable write buffer.
        final ThreadLocal<ByteBufferProvider> threadLocal = new ThreadLocal<ByteBufferProvider>() {
            @Override
            protected ByteBufferProvider initialValue() {
                return new ByteBufferProvider() {
                    ByteBufferWrapper wrapper = new ByteBufferWrapper(1000);

                    @Override
                    public ByteBufferWrapper getBuffer() {
                        return wrapper;
                    }
                };
            }
        };
        EpollServer<BufferedConnection> server = new EpollServer<BufferedConnection>(port) {
            @Override
            protected BufferedConnection createConnection(int fd, int ip, int port) {
                return new BufferedConnection(fd, ip, port);
            }

            final byte[] data = "HTTP/1.1 200 OK\r\nConnection: Keep-Alive\r\nContent-Length: 5\r\nContent-Type: text/html;charset=UTF-8\r\n\r\nololo".getBytes();

            @Override
            protected IOThread<BufferedConnection> createIOThread(int number, int divider) {
                return new IOThread<BufferedConnection>(number, divider) {
                    @Override
                    public void onRead(final BufferedConnection connection) {
                        if (poolSize == 0)
                            process(connection, this);
                        else
                            pool.add(new Runnable() {
                                @Override
                                public void run() {
                                    process(connection, threadLocal.get());
                                }
                            });
                    }

                    private void process(BufferedConnection connection, ByteBufferProvider bufferProvider) {
                        try {
                            connection.count += connection.read(connection.buffer, connection.count, connection.buffer.length - connection.count, bufferProvider);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        if (connection.count == 40) { // request size from wrk
                            connection.count = 0;
                            connection.write(data, bufferProvider);
                        }
                    }
                };
            }
        };
        server.setIoThreadsCount(2);
        server.start();
        Thread.sleep(25 * 60 * 1000);
        server.stopEpoll();
    }

    /**
     * 100 concurrent clients echoing 10000 messages each on a single IO thread
     * with a small epoll event budget; verifies all clients finish and every
     * connection is disconnected afterwards.
     */
    @Test
    public void maxEventsTest() throws InterruptedException {
        final int port = 9092;
        final AtomicInteger connections = new AtomicInteger();
        EpollServer server = new EpollServer(null, port, 200) {
            @Override
            protected IOThread createIOThread(int number, int divider) {
                return new IOThread(number, divider) {
                    @Override
                    public void onRead(Connection connection) {
                        try {
                            byte[] b = new byte[32];
                            int r = connection.read(b, 0, b.length, this);
                            connection.write(b, 0, r, this);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConnect(Connection connection) {
                        connections.incrementAndGet();
                    }

                    @Override
                    public void onDisconnect(Connection connection) {
                        connections.decrementAndGet();
                    }
                };
            }
        };
        server.setIoThreadsCount(1);
        server.start();
        final AtomicLong total = new AtomicLong(0);
        long time = System.currentTimeMillis();
        int threads = 100;
        final int n = 10000;
        final CountDownLatch latch = new CountDownLatch(threads);
        final AtomicInteger counter = new AtomicInteger();
        for (int j = 0; j < threads; j++) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        Socket s = new Socket("localhost", port);
                        OutputStream out = s.getOutputStream();
                        InputStream in = s.getInputStream();
                        byte[] b = new byte[1024];
                        byte[] hello = "hello world!".getBytes();
                        for (int i = 0; i < n; i++) {
                            out.write(hello);
                            out.flush();
                            int r = in.read(b);
                            total.addAndGet(r);
                            Assert.assertEquals("hello world!", new String(b, 0, r));
                        }
                        s.close();
                        // Only threads that completed every round-trip count;
                        // the assertEquals below detects any that bailed out.
                        counter.incrementAndGet();
                    } catch (IOException e) {
                        e.printStackTrace();
                    } finally {
                        latch.countDown();
                    }
                }
            }).start();
            Thread.sleep(10);
        }
        latch.await();
        Thread.sleep(100);
        Assert.assertEquals(threads, counter.get());
        Assert.assertEquals(0, connections.get());
        System.out.println("total bytes were sent: " + total.get() * 2);
        time = System.currentTimeMillis() - time;
        System.out.println("for " + time + "ms");
        System.out.println(total.get() * 1000.0 / time / 1024.0 / 1024.0);
        server.stopEpoll();
    }

    /**
     * Returns the first non-loopback IPv4 address of this machine, or null if
     * none is found (e.g. no network interfaces are up).
     */
    private String getLocalIp() throws UnknownHostException, SocketException {
        System.out.println("Your Host addr: " + InetAddress.getLocalHost().getHostAddress()); // often returns "127.0.0.1"
        Enumeration<NetworkInterface> n = NetworkInterface.getNetworkInterfaces();
        for (; n.hasMoreElements(); ) {
            NetworkInterface e = n.nextElement();
            Enumeration<InetAddress> a = e.getInetAddresses();
            for (; a.hasMoreElements(); ) {
                InetAddress addr = a.nextElement();
                if (addr.getAddress().length == 4 && !addr.getHostAddress().startsWith("127"))
                    return addr.getHostAddress();
            }
        }
        return null;
    }

    /**
     * Binding to a specific host must refuse connections on localhost while
     * still echoing on the bound address.
     */
    @Test
    public void hostBindTest() throws InterruptedException, UnknownHostException, SocketException {
        int port = 9090;
        String host = getLocalIp();
        EpollServer server = new EpollServer(host, port) {
            @Override
            protected IOThread createIOThread(int number, int divider) {
                return new IOThread(number, divider) {
                    @Override
                    public void onRead(Connection connection) {
                        try {
                            byte[] b = new byte[1024];
                            int r = connection.read(b, 0, b.length, this);
                            connection.write(b, 0, r, this);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                };
            }
        };
        server.start();
        String message = null;
        try {
            new Socket("localhost", port);
        } catch (IOException e) {
            message = e.getMessage();
        }
        Assert.assertEquals("Connection refused", message);
        try {
            Socket s = new Socket(host, port);
            OutputStream out = s.getOutputStream();
            out.write("hello world!".getBytes());
            InputStream in = s.getInputStream();
            byte[] b = new byte[1024];
            int r = in.read(b);
            Assert.assertEquals("hello world!", new String(b, 0, r));
        } catch (IOException e) {
            e.printStackTrace();
            // Previously this branch only logged, letting the echo check be
            // skipped silently on connection failure.
            Assert.fail("unexpected IOException: " + e.getMessage());
        }
        server.stopEpoll();
    }

    /** A 10 MB payload written on connect arrives intact (verified via MD5). */
    @Test
    public void testWriteEvents() throws IOException, InterruptedException {
        int port = 9090;
        String host = "localhost";
        final byte[] data = new byte[10 * 1024 * 1024];
        new Random().nextBytes(data);
        String md5 = MD5.getMD5AsString(data);
        EpollServer server = new EpollServer(host, port) {
            @Override
            protected IOThread createIOThread(int number, int divider) {
                return new IOThread(number, divider) {
                    @Override
                    public void onConnect(Connection connection) {
                        connection.write(data, this);
                    }
                };
            }

            @Override
            protected Connection createConnection(int fd, int ip, int port) {
                return new Connection(fd, ip, port);
            }
        };
        server.start();
        byte[] receive = new byte[10 * 1024 * 1024];
        int offset = 0;
        int r;
        Socket socket = new Socket(host, port);
        InputStream in = socket.getInputStream();
        Thread.sleep(1000);
        while ((r = in.read(receive, offset, receive.length - offset)) != -1) {
            offset += r;
            if (receive.length - offset == 0)
                break;
        }
        Assert.assertEquals(md5, MD5.getMD5AsString(receive));
        Assert.assertEquals(0, in.available());
        socket.close();
        server.stopEpoll();
    }

    /** 10000 sequential HTTP connect/request/close cycles complete successfully. */
    @Test
    public void testConnects() {
        int port = 9090;
        EpollServer server = new EpollServer(port) {
            byte[] data = "HTTP/1.1 200 OK\r\nConnection: Close\r\nContent-Length: 2\r\nContent-Type: text/html;charset=UTF-8\r\n\r\nok".getBytes();

            @Override
            protected IOThread createIOThread(int number, int divider) {
                return new IOThread(number, divider) {
                    byte[] b = new byte[1024];

                    @Override
                    public void onRead(Connection connection) {
                        try {
                            int r = connection.read(b, 0, b.length, this);
                            connection.write(data, this);
                            connection.close();
                        } catch (IOException e) {
                            // Server-side IO thread: a JUnit failure here would
                            // not reach the test runner, so just log; the final
                            // assertEquals(n, i) below catches broken cycles.
                            e.printStackTrace();
                        }
                    }
                };
            }
        };
        server.setIoThreadsCount(4);
        server.start();
        int i = 0;
        int n = 10000;
        Stopwatch stopwatch = new Stopwatch("time");
        try {
            while (true) {
                Assert.assertEquals("ok", HttpClient.createRequest("http://localhost:9090")
                        .header("Connection", "Close")
                        .get().asString());
                i++;
                if (i == n)
                    break;
            }
        } catch (Exception e) {
            System.out.println(i);
            e.printStackTrace();
        }
        server.stopEpoll();
        // Was `assert i == n` — a no-op without -ea. Fail explicitly.
        Assert.assertEquals(n, i);
        System.out.println(stopwatch);
    }

    /** Disabling and re-enabling write events delivers exactly one extra onWrite. */
    @Test
    public void testAsyncWriteEvent() {
        int port = 9090;
        final AtomicReference<Connection> connectionRef = new AtomicReference<Connection>();
        final AtomicInteger onWrite = new AtomicInteger();
        EpollServer server = new EpollServer(port) {
            @Override
            protected IOThread createIOThread(int number, int divider) {
                return new IOThread(number, divider) {
                    @Override
                    public void onConnect(Connection connection) {
                        connectionRef.set(connection);
                    }

                    @Override
                    public void onWrite(Connection connection) {
                        onWrite.incrementAndGet();
                        super.onWrite(connection);
                    }
                };
            }
        };
        server.start();
        try {
            int pause = 20;
            Socket s = new Socket("localhost", port);
            Thread.sleep(pause);
            Assert.assertNotNull(connectionRef.get());
            // The initial connect already triggers one onWrite event.
            Assert.assertEquals(1, onWrite.get());
            connectionRef.get().disableOnWriteEvent();
            Thread.sleep(pause);
            connectionRef.get().enableOnWriteEvent();
            Thread.sleep(pause);
            Assert.assertEquals(2, onWrite.get());
        } catch (IOException e) {
            e.printStackTrace();
            // Was `assert e == null` — a no-op without -ea. Fail explicitly.
            Assert.fail("unexpected IOException: " + e.getMessage());
        } catch (InterruptedException e) {
            e.printStackTrace();
            Assert.fail("unexpected InterruptedException: " + e.getMessage());
        }
        server.stopEpoll();
    }
}
|
package eme.generator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EOperation;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EParameter;
import org.junit.Before;
import org.junit.Test;
import eme.model.ExtractedClass;
import eme.model.ExtractedEnumeration;
import eme.model.ExtractedInterface;
import eme.model.ExtractedMethod;
import eme.model.ExtractedPackage;
import eme.model.IntermediateModel;
import eme.model.datatypes.AccessLevelModifier;
import eme.model.datatypes.ExtractedAttribute;
import eme.model.datatypes.ExtractedDataType;
import eme.model.datatypes.ExtractedParameter;
import eme.properties.ExtractionProperties;
import eme.properties.TestProperties;
/**
 * Tests {@link EObjectGenerator}: generation of Ecore packages, classes,
 * interfaces, enumerations, attributes and operations from the intermediate
 * model.
 */
public class EObjectGeneratorTest {
    ExtractionProperties properties;
    EObjectGenerator generator;
    IntermediateModel model;

    // Fresh generator and empty model per test. The generator must be prepared
    // for the model before any generateE* call, and completeGeneration() is
    // called after generation so deferred references (super types etc.) get
    // resolved.
    @Before
    public void setUp() throws Exception {
        properties = new TestProperties();
        generator = new EObjectGenerator(properties);
        model = new IntermediateModel("TestProject");
        generator.prepareFor(model);
    }

    // Root package with three subpackages and an enum: subpackage names and
    // nsPrefixes must mirror the extracted package names; the root gets the
    // default name.
    @Test
    public void testGeneratePackage() {
        ExtractedPackage root = new ExtractedPackage("");
        root.setAsRoot();
        root.add(new ExtractedPackage("model"));
        root.add(new ExtractedPackage("view"));
        root.add(new ExtractedPackage("controller"));
        root.add(new ExtractedEnumeration("someEnum"));
        EPackage result = generator.generateEPackage(root);
        generator.completeGeneration();
        assertNotNull(result);
        assertEquals("DEFAULT", result.getName());
        List<EPackage> subpackages = result.getESubpackages();
        assertEquals(3, subpackages.size());
        assertEquals("model", subpackages.get(0).getName());
        assertEquals("view", subpackages.get(1).getName());
        assertEquals("controller", subpackages.get(2).getName());
        for (EPackage ePackage : subpackages) {
            assertEquals(ePackage.getName(), ePackage.getNsPrefix());
        }
    }

    // A plain (non-abstract) class maps to a concrete, non-interface EClass.
    @Test
    public void testGenerateClass() {
        ExtractedClass normalClass = new ExtractedClass("NormalClass", false);
        EClass result = (EClass) generator.generateEClassifier(normalClass);
        generator.completeGeneration();
        assertEquals("NormalClass", result.getName());
        assertFalse(result.isAbstract());
        assertFalse(result.isInterface());
    }

    // A class naming a super class resolves it as the first ESuperType; the
    // super class must be registered in the model before generation.
    @Test
    public void testGenerateSuperClass() {
        ExtractedClass subClass = new ExtractedClass("SubClass", false);
        ExtractedClass superClass = new ExtractedClass("SuperClass", false);
        subClass.setSuperClass("SuperClass");
        model.add(new ExtractedPackage(""));
        model.add(subClass);
        model.add(superClass);
        EClass result = (EClass) generator.generateEClassifier(subClass);
        generator.completeGeneration();
        assertEquals("SubClass", result.getName());
        assertEquals("SuperClass", result.getESuperTypes().get(0).getName());
    }

    // Same resolution mechanism for interface inheritance.
    @Test
    public void testGenerateSuperInterface() {
        ExtractedInterface subInterface = new ExtractedInterface("SubInterface");
        ExtractedInterface superInterface = new ExtractedInterface("SuperInterface");
        subInterface.addInterface("SuperInterface");
        model.add(new ExtractedPackage(""));
        model.add(subInterface);
        model.add(superInterface);
        EClass result = (EClass) generator.generateEClassifier(subInterface);
        generator.completeGeneration();
        assertEquals("SubInterface", result.getName());
        assertEquals("SuperInterface", result.getESuperTypes().get(0).getName());
    }

    // java.lang.String attributes must map to the Ecore EString data type.
    @Test
    public void testGenerateAttribute() {
        ExtractedClass testClass = new ExtractedClass("TestClass", false);
        ExtractedAttribute attribute = new ExtractedAttribute("testAttribute", "java.lang.String", 0);
        testClass.addAttribute(attribute);
        EClass result = (EClass) generator.generateEClassifier(testClass);
        generator.completeGeneration();
        assertEquals(1, result.getEAttributes().size());
        EAttribute eAttribute = result.getEAttributes().get(0);
        assertEquals("testAttribute", eAttribute.getName());
        assertEquals("EString", eAttribute.getEType().getName());
    }

    // A public method with a String return type and an int parameter maps to
    // an EOperation with EString return type and an EInt EParameter.
    @Test
    public void testGenerateMethod() {
        ExtractedClass testClass = new ExtractedClass("TestClass", false);
        ExtractedDataType returnType = new ExtractedDataType("java.lang.String", 0);
        ExtractedParameter parameter = new ExtractedParameter("number", "int", 0);
        ExtractedMethod method = new ExtractedMethod("testMethod", returnType, false);
        method.setFlags(AccessLevelModifier.PUBLIC, false, false);
        method.addParameter(parameter);
        testClass.addMethod(method);
        EClass result = (EClass) generator.generateEClassifier(testClass);
        generator.completeGeneration();
        assertEquals(1, result.getEOperations().size());
        EOperation operation = result.getEOperations().get(0);
        assertEquals("testMethod", operation.getName());
        assertEquals("EString", operation.getEType().getName());
        EParameter eParameter = operation.getEParameters().get(0);
        assertEquals("number", eParameter.getName());
        assertEquals("EInt", eParameter.getEType().getName());
    }

    // Abstract flag carries over: abstract but not an interface.
    @Test
    public void testGenerateAbstractClass() {
        ExtractedClass abstractClass = new ExtractedClass("abstractClass", true);
        EClass result = (EClass) generator.generateEClassifier(abstractClass);
        generator.completeGeneration();
        assertEquals("abstractClass", result.getName());
        assertTrue(result.isAbstract());
        assertFalse(result.isInterface());
    }

    // Interfaces map to EClasses that are both abstract and interface.
    @Test
    public void testGenerateInterface() {
        ExtractedInterface extractedInterface = new ExtractedInterface("Interface");
        EClass result = (EClass) generator.generateEClassifier(extractedInterface);
        generator.completeGeneration();
        assertEquals("Interface", result.getName());
        assertTrue(result.isAbstract());
        assertTrue(result.isInterface());
    }

    // Every extracted enumeral must appear as an EEnum literal.
    @Test
    public void testGenerateEnum() {
        ExtractedEnumeration enumeration = new ExtractedEnumeration("Enum");
        for (int i = 0; i < 5; i++) {
            enumeration.addEnumeral("ENUMERAL_" + i);
        }
        EEnum result = (EEnum) generator.generateEClassifier(enumeration);
        generator.completeGeneration();
        assertEquals("Enum", result.getName());
        for (int i = 0; i < 5; i++) {
            assertNotNull(result.getEEnumLiteral("ENUMERAL_" + i));
        }
    }
}
|
package integration.proxy;
import com.codeborne.selenide.WebDriverRunner;
import integration.IntegrationTest;
import io.netty.handler.codec.http.HttpResponse;
import net.lightbody.bmp.BrowserMobProxy;
import net.lightbody.bmp.BrowserMobProxyServer;
import net.lightbody.bmp.client.ClientUtil;
import net.lightbody.bmp.filters.ResponseFilter;
import net.lightbody.bmp.util.HttpMessageContents;
import net.lightbody.bmp.util.HttpMessageInfo;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.Proxy;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import static com.codeborne.selenide.Condition.text;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.close;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.*;
/**
* Selenide runs its own proxy server.
* User can also configure Selenide to use his proxy server (for Selenide, this is "chained" proxy).
*
* This test verifies that both these proxies work well together.
*/
public class ChainedProxyTest extends IntegrationTest {
    static BrowserMobProxy chainedProxy;
    // Must be static: the ResponseFilter below is registered exactly once (on
    // the first test instance, guarded by `chainedProxy == null`), while JUnit
    // creates a fresh instance of this class for every test method. With an
    // instance field, later tests would clear/read their own (always empty)
    // list while the filter keeps appending to the first instance's list.
    static final List<String> visitedUrls = new ArrayList<>();

    /** Starts the chained BrowserMob proxy once and wires it into Selenide. */
    @Before
    public void setUp() throws UnknownHostException {
        if (chainedProxy == null) {
            close();
            chainedProxy = new BrowserMobProxyServer();
            chainedProxy.setTrustAllServers(true);
            chainedProxy.start(0);
            chainedProxy.addResponseFilter(new ResponseFilter() {
                @Override
                public void filterResponse(HttpResponse response, HttpMessageContents contents, HttpMessageInfo messageInfo) {
                    System.out.println(response.getStatus().code());
                    visitedUrls.add(messageInfo.getUrl());
                }
            });
            Proxy seleniumProxy = ClientUtil.createSeleniumProxy(chainedProxy);
            WebDriverRunner.setProxy(seleniumProxy);
        }
        visitedUrls.clear();
    }

    /** Detaches the proxy from Selenide and shuts it down after all tests. */
    @AfterClass
    public static void tearDown() {
        WebDriverRunner.setProxy(null);
        close();
        if (chainedProxy != null) {
            chainedProxy.stop();
        }
    }

    @Test
    public void selenideProxyCanWorkWithUserProvidedChainedProxy() {
        openFile("file_upload_form.html");
        $("#cv").uploadFromClasspath("hello_world.txt");
        $("#avatar").uploadFromClasspath("firebug-1.11.4.xpi");
        $("#submit").click();

        // Assert that files are actually uploaded via 2 proxies
        $("h3").shouldHave(text("Uploaded 2 files"));
        assertEquals(2, server.uploadedFiles.size());

        // Assert that "chained" proxy has intercepted requests.
        // Message fixed to match the condition (it said "at least 2" while
        // checking >= 3: form page load + upload request + at least one more).
        assertTrue("Expected at least 3 urls, but got: " + visitedUrls, visitedUrls.size() >= 3);
        assertThat(visitedUrls.get(0), containsString("/file_upload_form.html"));
        assertThat(visitedUrls.get(visitedUrls.size() - 1), containsString("/upload"));
    }
}
|
package io.cfp.api;
import io.cfp.mapper.CoSpeakerMapper;
import io.cfp.mapper.ProposalMapper;
import io.cfp.mapper.RateMapper;
import io.cfp.mapper.UserMapper;
import io.cfp.model.Proposal;
import io.cfp.model.Role;
import io.cfp.model.User;
import io.cfp.model.queries.ProposalQuery;
import io.cfp.service.PdfCardService;
import io.cfp.service.email.EmailingService;
import io.cfp.utils.Utils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
 * Web-layer tests for {@code ProposalsController}: authentication, CRUD authorisation
 * rules and the confirmation e-mail trigger. Collaborating mappers/services are mocked.
 */
@RunWith(SpringRunner.class)
@WebMvcTest(ProposalsController.class)
public class ProposalsControllerTest {

    @MockBean
    private ProposalMapper proposalMapper;

    @MockBean
    private UserMapper userMapper;

    @MockBean
    private RateMapper rateMapper;

    @MockBean
    private CoSpeakerMapper coSpeakerMapper;

    @MockBean
    private EmailingService emailingService;

    @MockBean
    private PdfCardService pdfCardService;

    @Autowired
    private MockMvc mockMvc;

    // Fully populated proposal shared by the read-oriented tests.
    private Proposal proposal;

    @Before
    public void setUp() {
        User speaker = new User()
            .setId(20)
            .setEmail("EMAIL");

        proposal = new Proposal()
            .setId(10)
            .setState(Proposal.State.ACCEPTED)
            .setAdded(new Date())
            .setDescription("DESCRIPTION")
            .setEventId("EVENT_ID")
            .setFormat(11)
            .setLanguage("LANGUAGE")
            .setDifficulty(1)
            .setName("NAME")
            .setReferences("REFERENCES")
            .setRoomId(12)
            .setSchedule(new Date())
            .setSlides("SLIDES")
            .setVideo("VIDEO")
            .setTrackId(13)
            .setTrackLabel("TRACK_LABEL")
            .setSpeaker(speaker);
    }

    /**
     * Stubs the "EMAIL" account lookup for the given user and returns a Bearer token
     * for it. Extracted because every authenticated test repeated the same two lines.
     *
     * @param user the (already role-equipped) user to authenticate as
     * @return a JWT token for {@code user}
     */
    private String authenticate(User user) {
        when(userMapper.findByEmail("EMAIL")).thenReturn(user);
        return Utils.createTokenForUser(user);
    }

    @Test
    public void should_get_proposals() throws Exception {
        List<Proposal> proposals = new ArrayList<>();
        proposals.add(proposal);
        when(proposalMapper.findAll(any(ProposalQuery.class))).thenReturn(proposals);

        User user = new User();
        user.setEmail("EMAIL");
        user.addRole(Role.ADMIN);
        String token = authenticate(user);

        mockMvc.perform(get("/api/proposals")
            .accept(MediaType.APPLICATION_JSON_UTF8)
            .contentType(MediaType.APPLICATION_JSON_UTF8)
            .header("Authorization", "Bearer " + token)
        )
            .andDo(print())
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
            .andExpect(jsonPath("$[0].id").value("10"))
        ;
    }

    @Test
    public void should_get_proposal_by_id() throws Exception {
        when(proposalMapper.findById(eq(10), anyString())).thenReturn(proposal);

        User user = new User();
        user.setId(20);
        user.setEmail("EMAIL");
        user.addRole(Role.ADMIN);
        String token = authenticate(user);

        mockMvc.perform(get("/api/proposals/10")
            .accept(MediaType.APPLICATION_JSON_UTF8)
            .contentType(MediaType.APPLICATION_JSON_UTF8)
            .header("Authorization", "Bearer " + token)
        )
            .andDo(print())
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
            .andExpect(jsonPath("$.id").value("10"))
            .andExpect(jsonPath("$.state").value("ACCEPTED"))
            .andExpect(jsonPath("$.description").value("DESCRIPTION"))
            .andExpect(jsonPath("$.eventId").value("EVENT_ID"))
            .andExpect(jsonPath("$.language").value("LANGUAGE"))
            .andExpect(jsonPath("$.roomId").value("12"))
            .andExpect(jsonPath("$.format").value("11"))
            .andExpect(jsonPath("$.difficulty").value("1"))
            .andExpect(jsonPath("$.name").value("NAME"))
            .andExpect(jsonPath("$.references").value("REFERENCES"))
            .andExpect(jsonPath("$.slides").value("SLIDES"))
            .andExpect(jsonPath("$.video").value("VIDEO"))
            .andExpect(jsonPath("$.trackId").value("13"))
            .andExpect(jsonPath("$.trackLabel").value("TRACK_LABEL"))
            .andExpect(jsonPath("$.speaker.id").value("20"))
        ;
    }

    @Test
    public void should_not_authorise_anonymous_to_create_proposals() throws Exception {
        String newProposal = Utils.getContent("/json/proposals/new_proposal.json");

        // No Authorization header: the request must be rejected.
        mockMvc.perform(post("/api/proposals")
            .accept(MediaType.APPLICATION_JSON_UTF8)
            .contentType(MediaType.APPLICATION_JSON_UTF8)
            .content(newProposal)
        )
            .andDo(print())
            .andExpect(status().isUnauthorized())
        ;
    }

    @Test
    public void should_create_proposals() throws Exception {
        User user = new User();
        user.setId(20);
        user.setEmail("EMAIL");
        user.addRole(Role.AUTHENTICATED);
        String token = authenticate(user);

        String newProposal = Utils.getContent("/json/proposals/new_proposal.json");

        mockMvc.perform(post("/api/proposals")
            .accept(MediaType.APPLICATION_JSON_UTF8)
            .contentType(MediaType.APPLICATION_JSON_UTF8)
            .header("Authorization", "Bearer " + token)
            .content(newProposal)
        )
            .andDo(print())
            .andExpect(status().isCreated())
        ;
    }

    @Test
    public void should_not_create_invalid_proposals() throws Exception {
        User user = new User();
        user.setId(20);
        user.setEmail("EMAIL");
        user.addRole(Role.AUTHENTICATED);
        String token = authenticate(user);

        String invalidProposal = Utils.getContent("/json/proposals/invalid_proposal.json");

        mockMvc.perform(post("/api/proposals")
            .accept(MediaType.APPLICATION_JSON_UTF8)
            .contentType(MediaType.APPLICATION_JSON_UTF8)
            .header("Authorization", "Bearer " + token)
            .content(invalidProposal)
        )
            .andDo(print())
            .andExpect(status().isBadRequest())
        ;
    }

    @Test
    public void should_update_my_proposals() throws Exception {
        // User id 20 matches the speaker id in other_proposal.json.
        User user = new User();
        user.setId(20);
        user.setEmail("EMAIL");
        user.addRole(Role.AUTHENTICATED);
        String token = authenticate(user);

        String updatedProposal = Utils.getContent("/json/proposals/other_proposal.json");

        mockMvc.perform(put("/api/proposals/25")
            .accept(MediaType.APPLICATION_JSON_UTF8)
            .contentType(MediaType.APPLICATION_JSON_UTF8)
            .header("Authorization", "Bearer " + token)
            .content(updatedProposal)
        )
            .andDo(print())
            .andExpect(status().isNoContent())
        ;
    }

    @Test
    public void should_not_update_others_proposals() throws Exception {
        // User id 21 is NOT the speaker of the proposal being updated.
        User user = new User();
        user.setId(21);
        user.setEmail("EMAIL");
        user.addRole(Role.AUTHENTICATED);
        String token = authenticate(user);

        String updatedProposal = Utils.getContent("/json/proposals/other_proposal.json");

        mockMvc.perform(put("/api/proposals/25")
            .accept(MediaType.APPLICATION_JSON_UTF8)
            .contentType(MediaType.APPLICATION_JSON_UTF8)
            .header("Authorization", "Bearer " + token)
            .content(updatedProposal)
        )
            .andDo(print())
            .andExpect(status().isForbidden())
        ;
    }

    @Test
    public void should_email_confirmation_when_proposal_is_confirmed() throws Exception {
        User user = new User();
        user.setId(21);
        user.setEmail("EMAIL");
        user.setFirstname("FIRSTNAME");
        user.addRole(Role.AUTHENTICATED);
        String token = authenticate(user);

        Proposal proposal = new Proposal().setId(25)
            .setName("PROPOSAL_NAME")
            .setSpeaker(new User().setId(21));
        when(proposalMapper.findById(eq(25), anyString())).thenReturn(proposal);

        String updatedProposal = Utils.getContent("/json/proposals/other_proposal.json");

        mockMvc.perform(put("/api/proposals/25/confirm")
            .accept(MediaType.APPLICATION_JSON_UTF8)
            .contentType(MediaType.APPLICATION_JSON_UTF8)
            .header("Authorization", "Bearer " + token)
            .content(updatedProposal)
        )
            .andDo(print())
            .andExpect(status().isNoContent())
        ;

        verify(emailingService).sendConfirmed(eq(user), eq(proposal));
    }

    /* FIXME will need to make it clearer what we consider an "invalid proposal"
    @Test
    public void should_not_update_invalid_proposals() throws Exception {
    User user = new User();
    user.setId(21);
    user.setEmail("EMAIL");
    user.addRole(Role.AUTHENTICATED);
    String token = Utils.createTokenForUser(user);
    when(userMapper.findByEmail("EMAIL")).thenReturn(user);
    String invalidProposal = Utils.getContent("/json/proposals/invalid_proposal.json");
    mockMvc.perform(put("/api/proposals/25")
    .accept(MediaType.APPLICATION_JSON_UTF8)
    .contentType(MediaType.APPLICATION_JSON_UTF8)
    .header("Authorization", "Bearer "+token)
    .content(invalidProposal)
    )
    .andDo(print())
    .andExpect(status().isBadRequest())
    ;
    } */
}
|
package org.opencps.api.controller.impl;
import java.io.File;
import java.net.HttpURLConnection;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.opencps.api.controller.SignatureManagement;
import org.opencps.api.controller.exception.ErrorMsg;
import org.opencps.api.digitalsignature.model.DigitalSignatureInputModel;
import org.opencps.auth.api.BackendAuth;
import org.opencps.auth.api.BackendAuthImpl;
import org.opencps.auth.api.exception.UnauthenticationException;
import org.opencps.auth.api.exception.UnauthorizationException;
import org.opencps.dossiermgt.action.DossierActions;
import org.opencps.dossiermgt.action.impl.DossierActionsImpl;
import org.opencps.dossiermgt.model.Dossier;
import org.opencps.dossiermgt.model.DossierFile;
import org.opencps.dossiermgt.model.DossierPart;
import org.opencps.dossiermgt.scheduler.InvokeREST;
import org.opencps.dossiermgt.scheduler.RESTFulConfiguration;
import org.opencps.dossiermgt.service.DossierFileLocalServiceUtil;
import org.opencps.dossiermgt.service.DossierLocalServiceUtil;
import org.opencps.dossiermgt.service.DossierPartLocalServiceUtil;
import com.liferay.document.library.kernel.model.DLFileEntry;
import com.liferay.document.library.kernel.service.DLAppLocalServiceUtil;
import com.liferay.document.library.kernel.service.DLFileEntryLocalServiceUtil;
import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.json.JSONFactoryUtil;
import com.liferay.portal.kernel.json.JSONObject;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.model.Company;
import com.liferay.portal.kernel.model.User;
import com.liferay.portal.kernel.service.ServiceContext;
import com.liferay.portal.kernel.servlet.HttpMethods;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.StringPool;
public class SignatureManagementImpl implements SignatureManagement {

    Log _log = LogFactoryUtil.getLog(SignatureManagementImpl.class.getName());

    /**
     * Completes a digital signature for a dossier file: forwards the signature to the
     * signature server, replaces the stored PDF with its signed counterpart and then
     * advances the dossier workflow with the supplied action.
     *
     * @param id dossier id (also forwarded to the signature server call)
     * @return HTTP 200 with {@code {"msg":"success"}} when the signed file was stored and
     *         the workflow action executed; HTTP 200 with an empty JSON object otherwise
     *         (kept for backward compatibility with existing clients)
     * @throws PortalException on authentication failure or Liferay service errors
     */
    @Override
    public Response updateDossierFileBySignature(HttpServletRequest request, HttpHeaders header, Company company,
        Locale locale, User user, ServiceContext serviceContext, Long id, DigitalSignatureInputModel input) throws PortalException {

        BackendAuth auth = new BackendAuthImpl();

        long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
        long dossierId = id;

        if (!auth.isAuth(serviceContext)) {
            throw new UnauthenticationException();
        }

        String sign = input.getSign();
        String signFieldName = input.getSignFieldName();
        String fileName = input.getFileName();

        _log.info("sign: " + sign);
        _log.info("signFieldName: " + signFieldName);
        _log.info("fileName: " + fileName);

        String actionCode = input.getActionCode();
        String actionUser = input.getActionUser();
        String actionNote = input.getActionNote();
        long assignUserId = Long.valueOf(input.getAssignUserId());
        String subUsers = input.getSubUsers();

        _log.info("actionCode: " + actionCode);
        _log.info("actionUser: " + actionUser);
        _log.info("actionNote: " + actionNote);
        _log.info("assignUserId: " + assignUserId);
        _log.info("subUsers: " + subUsers);

        JSONObject signatureCompleted = callSignatureSync(groupId, user, id, sign, signFieldName, fileName, serviceContext);

        JSONObject result = JSONFactoryUtil.createJSONObject();

        if (signatureCompleted.getInt(RESTFulConfiguration.STATUS) == HttpURLConnection.HTTP_OK) {
            long fileEntryId = Long.valueOf(input.getFileEntryId());
            _log.info("fileEntryId: " + fileEntryId);

            String message = signatureCompleted.getString(RESTFulConfiguration.MESSAGE);
            _log.info("message: " + message);

            JSONObject jsonData = JSONFactoryUtil.createJSONObject(message);
            _log.info("jsonData: " + jsonData.toJSONString());

            String fullPath = String.valueOf(jsonData.get("fullPath"));
            _log.info("fullPath: " + fullPath);

            // The signature server writes the signed copy next to the original as "*.signed.pdf".
            File fileSigned = new File(fullPath.replace(".pdf", ".signed.pdf"));
            _log.info("fileSigned Path: " + fileSigned.getAbsolutePath());
            _log.info("fileSigned Name: " + fileSigned.getName());

            DLFileEntry dlFileEntry = DLFileEntryLocalServiceUtil.fetchDLFileEntry(fileEntryId);

            // Fix: fetchDLFileEntry() returns null for an unknown id; the previous code
            // dereferenced it unconditionally and threw a NullPointerException here.
            if (dlFileEntry == null) {
                _log.error("No DLFileEntry found for fileEntryId " + fileEntryId + "; signed file not stored");
            }
            else {
                _log.info("dlFileEntry: " + dlFileEntry.getFileName());

                DLAppLocalServiceUtil.updateFileEntry(user.getUserId(), dlFileEntry.getFileEntryId(), dlFileEntry.getTitle(),
                    dlFileEntry.getMimeType(), dlFileEntry.getTitle(), dlFileEntry.getDescription(),
                    StringPool.BLANK, false, fileSigned, serviceContext);

                // Next action: advance the dossier workflow only after the signed file is stored.
                Dossier dossier = DossierLocalServiceUtil.fetchDossier(dossierId);
                if (dossier != null) {
                    _log.info("dossierId: " + dossier.getDossierId());
                    _log.info("ReferenceId: " + dossier.getReferenceUid());

                    DossierActions dossierAction = new DossierActionsImpl();

                    dossierAction.doAction(groupId, dossierId, dossier.getReferenceUid(), actionCode,
                        0L, actionUser, actionNote, assignUserId, user.getUserId(), subUsers,
                        serviceContext);

                    // Process success
                    result.put("msg", "success");
                }
            }
        }

        return Response.status(200).entity(JSONFactoryUtil.looseSerialize(result)).build();
    }

    /**
     * Posts the collected signature data to the signature server's
     * {@code signature/completeSignature} endpoint.
     *
     * <p>Note: {@code user} and {@code id} are currently unused but kept to preserve the
     * existing call signature.</p>
     */
    private JSONObject callSignatureSync(long groupId, User user, long id, String sign, String signFieldName, String fileName,
        ServiceContext serviceContext) throws PortalException {

        Map<String, Object> params = new HashMap<String, Object>();
        params.put("sign", sign);
        params.put("signFieldName", signFieldName);
        params.put("fileName", fileName);

        return callServer(groupId, "signature/completeSignature", params, serviceContext);
    }

    /**
     * Computes the hash of the first e-signable dossier file referenced by
     * {@code input.getStrIdArr()} (pairs of {@code dossierFileId,dossierPartId} separated
     * by semicolons).
     *
     * @return HTTP 200 with the hash payload on success; a structured error otherwise
     */
    @Override
    public Response getHashComputedBySignature(HttpServletRequest request, HttpHeaders header, Company company,
        Locale locale, User user, ServiceContext serviceContext, Long id, DigitalSignatureInputModel input) {

        BackendAuth auth = new BackendAuthImpl();

        long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));

        try {
            if (!auth.isAuth(serviceContext)) {
                throw new UnauthenticationException();
            }

            String strIdArr = input.getStrIdArr();
            _log.info("array Id: " + strIdArr);

            String[] idSplit = strIdArr.split(StringPool.SEMICOLON);
            // Fix: logging the array reference printed only its identity hash.
            _log.info("idSplit length: " + idSplit.length);

            JSONObject hashComputed = null;
            for (String strId : idSplit) {
                String[] idArr = strId.split(StringPool.COMMA);

                DossierPart dossierPart = DossierPartLocalServiceUtil.fetchDossierPart(Long.valueOf(idArr[1]));

                DossierFile dossierFile = null;
                if (dossierPart != null && dossierPart.getESign()) {
                    dossierFile = DossierFileLocalServiceUtil.fetchDossierFile(Long.valueOf(idArr[0]));
                    if (dossierFile != null && dossierFile.getFileEntryId() > 0) {
                        long fileEntryId = dossierFile.getFileEntryId();
                        _log.info("fileEntryId: " + fileEntryId);

                        hashComputed = callHashComputedSync(groupId, user, fileEntryId, serviceContext);
                        _log.info("hashComputed: " + hashComputed);
                        break;
                    }
                }
            }

            // Fix: when no e-signable file matched, hashComputed stayed null and the
            // next line threw a NullPointerException (surfacing as an opaque 500).
            if (hashComputed == null) {
                ErrorMsg error = new ErrorMsg();
                error.setMessage("Internal Server Error");
                error.setCode(HttpURLConnection.HTTP_INTERNAL_ERROR);
                error.setDescription("No e-signable dossier file found for ids: " + input.getStrIdArr());
                return Response.status(HttpURLConnection.HTTP_INTERNAL_ERROR).entity(error).build();
            }

            JSONObject results = JSONFactoryUtil.createJSONObject(hashComputed.getString(RESTFulConfiguration.MESSAGE));
            _log.info("results: " + results);

            return Response.status(200).entity(JSONFactoryUtil.looseSerialize(results)).build();
        } catch (Exception e) {
            ErrorMsg error = new ErrorMsg();

            if (e instanceof UnauthenticationException) {
                error.setMessage("Non-Authoritative Information.");
                error.setCode(HttpURLConnection.HTTP_NOT_AUTHORITATIVE);
                error.setDescription("Non-Authoritative Information.");

                return Response.status(HttpURLConnection.HTTP_NOT_AUTHORITATIVE).entity(error).build();
            } else if (e instanceof UnauthorizationException) {
                error.setMessage("Unauthorized.");
                // Fix: the body code previously said 203 (HTTP_NOT_AUTHORITATIVE) while the
                // response status was 401 — keep them consistent.
                error.setCode(HttpURLConnection.HTTP_UNAUTHORIZED);
                error.setDescription("Unauthorized.");

                return Response.status(HttpURLConnection.HTTP_UNAUTHORIZED).entity(error).build();
            } else {
                error.setMessage("Internal Server Error");
                // Fix: the body code previously said 403 (HTTP_FORBIDDEN) for a 500 response.
                error.setCode(HttpURLConnection.HTTP_INTERNAL_ERROR);
                error.setDescription(e.getMessage());

                return Response.status(HttpURLConnection.HTTP_INTERNAL_ERROR).entity(error).build();
            }
        }
    }

    /**
     * Requests the signing token/hash for a file entry from the signature server's
     * {@code signature/requestsToken} endpoint.
     */
    private JSONObject callHashComputedSync(long groupId, User user, long fileEntryId, ServiceContext serviceContext) throws PortalException {
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("fileEntryId", fileEntryId);
        params.put("emailUser", user.getEmailAddress());

        return callServer(groupId, "signature/requestsToken", params, serviceContext);
    }

    /**
     * Shared plumbing for POSTing to the signature server; extracted from the two
     * previously duplicated call helpers.
     */
    private JSONObject callServer(long groupId, String endPoint, Map<String, Object> params, ServiceContext serviceContext)
        throws PortalException {

        InvokeREST rest = new InvokeREST();
        HashMap<String, String> properties = new HashMap<String, String>();

        return rest.callPostAPI(groupId, HttpMethods.POST, "application/json",
            RESTFulConfiguration.SERVER_PATH_BASE, endPoint, RESTFulConfiguration.SERVER_USER,
            RESTFulConfiguration.SERVER_PASS, properties, params, serviceContext);
    }
}
|
/* @java.file.header */
package org.gridgain.grid.util.typedef;
import org.gridgain.grid.*;
import org.gridgain.grid.util.*;
import org.gridgain.grid.util.typedef.internal.*;
import org.jetbrains.annotations.*;
import java.io.*;
import java.lang.reflect.Array;
import java.lang.reflect.*;
import java.sql.*;
import java.util.*;
/**
* Defines global scope.
* <p>
* Contains often used utility functions allowing to cut down on code bloat. This
* is somewhat analogous to {@code Predef} in Scala. Note that this should only be used
* when this typedef <b>does not sacrifice</b> the code readability.
*/
public final class X {
/** An empty immutable <code>Object</code> array, reusable for no-arg reflective invocations. */
public static final Object[] EMPTY_OBJECT_ARRAY = new Object[0];

/** Time span dividers (msec -> sec -> min -> hour), used by the {@code timeSpan2*} methods. */
private static final long[] SPAN_DIVS = new long[] {1000L, 60L, 60L, 60L};

/** The names of methods commonly used to access a wrapped exception. */
private static final String[] CAUSE_MTD_NAMES = new String[] {
    "getCause",
    "getNextException",
    "getTargetException",
    "getException",
    "getSourceException",
    "getRootCause",
    "getCausedByException",
    "getNested",
    "getLinkedException",
    "getNestedException",
    "getLinkedCause",
    "getThrowable"
};

/** The Method object for Java 1.4 getCause, or {@code null} if the lookup failed. */
private static final Method THROWABLE_CAUSE_METHOD;

static {
    Method causeMtd;

    try {
        // Explicit no-arg lookup; the previous bare 'null' argument triggered an
        // ambiguous-varargs compiler warning.
        causeMtd = Throwable.class.getMethod("getCause");
    }
    catch (Exception ignored) {
        causeMtd = null;
    }

    THROWABLE_CAUSE_METHOD = causeMtd;
}

/**
 * Ensures singleton.
 */
private X() {
    // No-op.
}
/**
 * Alias for {@code System.out.println()}: prints an empty line to standard out.
 */
public static void println() {
    System.out.println();
}

/**
 * Alias for {@code System.err.println()}: prints an empty line to standard error.
 */
public static void printerrln() {
    System.err.println();
}

/**
 * Alias for {@code System.out.println}: prints {@code s1}, then each extra object on
 * its own line.
 *
 * @param s1 First string to print.
 * @param rest Optional list of objects to print as well.
 */
public static void println(@Nullable String s1, @Nullable Object... rest) {
    System.out.println(s1);

    if (rest == null)
        return;

    for (Object item : rest)
        System.out.println(item);
}

/**
 * Alias for {@code System.err.println}: delegates to {@link #error(String, Object...)}.
 *
 * @param s1 First string to print.
 * @param rest Optional list of objects to print as well.
 */
public static void printerrln(@Nullable String s1, @Nullable Object... rest) {
    error(s1, rest);
}

/**
 * Alias for {@code System.err.println}: prints {@code s1}, then each extra object on
 * its own line.
 *
 * @param s1 First string to print.
 * @param rest Optional list of objects to print as well.
 */
public static void error(@Nullable String s1, @Nullable Object... rest) {
    System.err.println(s1);

    if (rest == null)
        return;

    for (Object item : rest)
        System.err.println(item);
}

/**
 * Alias for {@code System.out.print}: prints {@code s1}, then each extra object
 * without a trailing newline.
 *
 * @param s1 First string to print.
 * @param rest Optional list of objects to print as well.
 */
public static void print(@Nullable String s1, @Nullable Object... rest) {
    System.out.print(s1);

    if (rest == null)
        return;

    for (Object item : rest)
        System.out.print(item);
}

/**
 * Alias for {@code System.err.print}: prints {@code s1}, then each extra object
 * without a trailing newline.
 *
 * @param s1 First string to print.
 * @param rest Optional list of objects to print as well.
 */
public static void printerr(@Nullable String s1, @Nullable Object... rest) {
    System.err.print(s1);

    if (rest == null)
        return;

    for (Object item : rest)
        System.err.print(item);
}
/**
 * Gets either the system property or the environment variable with the given name,
 * the system property taking precedence.
 *
 * @param name Name of the system property or environment variable.
 * @return Value of the system property or environment variable, or {@code null} if
 *      neither can be found for the given name.
 */
@Nullable public static String getSystemOrEnv(String name) {
    assert name != null;

    String prop = System.getProperty(name);

    return prop != null ? prop : System.getenv(name);
}

/**
 * Gets either the system property or the environment variable with the given name,
 * falling back to a default when neither is set (or the value is empty).
 *
 * @param name Name of the system property or environment variable.
 * @param dflt Default value.
 * @return Resolved value, or {@code dflt} when the resolved value is missing or empty.
 */
@Nullable public static String getSystemOrEnv(String name, String dflt) {
    assert name != null;

    String val = getSystemOrEnv(name);

    return F.isEmpty(val) ? dflt : val;
}
/**
 * Splits a time span into {msec, sec, min, hour} components using {@link #SPAN_DIVS}.
 * NOTE(review): the last divider is 60, so the hour component wraps for spans of
 * 60 hours or more — preserved from the original implementation.
 */
private static long[] splitSpan(long span) {
    long[] parts = new long[4];
    long rem = span;

    for (int i = 0; i < SPAN_DIVS.length && rem > 0; rem /= SPAN_DIVS[i++])
        parts[i] = rem % SPAN_DIVS[i];

    return parts;
}

/** Left-pads a component to two digits (values >= 10 are printed as-is). */
private static String pad2(long v) {
    return v < 10 ? "0" + v : Long.toString(v);
}

/**
 * Creates string presentation of given time {@code span} in hh:mm:ss:msec {@code HMSM} format.
 *
 * @param span Time span in milliseconds.
 * @return String presentation.
 */
public static String timeSpan2HMSM(long span) {
    long[] p = splitSpan(span);

    return pad2(p[3]) + ':' + pad2(p[2]) + ':' + pad2(p[1]) + ':' + pad2(p[0]);
}

/**
 * Creates string presentation of given time {@code span} in hh:mm:ss {@code HMS} format.
 *
 * @param span Time span in milliseconds.
 * @return String presentation.
 */
public static String timeSpan2HMS(long span) {
    long[] p = splitSpan(span);

    return pad2(p[3]) + ':' + pad2(p[2]) + ':' + pad2(p[1]);
}
/**
 * Clones the passed-in object.
 * <p>
 * With {@code deep == true} a reflective deep copy is performed; the {@link Cloneable}
 * interface is honored on encountered objects unless {@code honorCloneable} is
 * {@code false}. With {@code deep == false} the object is shallow-cloned: objects
 * implementing {@link Cloneable} are copied via {@link Object#clone()}, all others are
 * returned unchanged.
 *
 * @param obj Object to create a clone from.
 * @param deep {@code true} for deep cloning, {@code false} for shallow cloning.
 * @param honorCloneable Whether {@link Cloneable} should be honored during deep cloning.
 * @param <T> Type of cloning object.
 * @return Copy of a passed in object.
 */
@SuppressWarnings({"unchecked"})
@Nullable public static <T> T cloneObject(@Nullable T obj, boolean deep, boolean honorCloneable) {
    if (obj == null)
        return null;

    try {
        if (!deep)
            return shallowClone(obj);

        return (T)deepClone(new GridLeanMap<Integer, Integer>(), new ArrayList<>(), obj, honorCloneable);
    }
    catch (Throwable e) {
        throw new GridRuntimeException("Unable to clone instance of class: " + obj.getClass(), e);
    }
}
/**
 * Shallow-clones the given object: arrays are copied element-by-reference, other
 * {@link Cloneable} objects are copied via their (possibly non-public) {@code clone()}
 * method, and non-cloneable objects are returned unchanged.
 *
 * @param obj Object to make a clone for.
 * @param <T> Type of cloning object.
 * @return Copy of a passed in object.
 */
@SuppressWarnings({"unchecked"})
@Nullable private static <T> T shallowClone(@Nullable T obj) {
    if (obj == null)
        return null;

    // Objects that do not advertise Cloneable are returned as-is.
    if (!(obj instanceof Cloneable))
        return obj;

    if (obj.getClass().isArray()) {
        if (obj instanceof byte[])
            return (T)((byte[])obj).clone();
        if (obj instanceof short[])
            return (T)((short[])obj).clone();
        if (obj instanceof char[])
            return (T)((char[])obj).clone();
        if (obj instanceof int[])
            return (T)((int[])obj).clone();
        if (obj instanceof long[])
            return (T)((long[])obj).clone();
        if (obj instanceof float[])
            return (T)((float[])obj).clone();
        if (obj instanceof double[])
            return (T)((double[])obj).clone();
        if (obj instanceof boolean[])
            return (T)((boolean[])obj).clone();

        return (T)((Object[])obj).clone();
    }

    try {
        // 'getDeclaredMethod' searches ALL methods ('getMethod' finds only public ones),
        // so protected clone() implementations are reachable too.
        Method mtd = obj.getClass().getDeclaredMethod("clone");

        boolean restore = false;

        if (!mtd.isAccessible()) {
            mtd.setAccessible(true);

            restore = true;
        }

        T clone = (T)mtd.invoke(obj);

        if (restore)
            mtd.setAccessible(false);

        return clone;
    }
    catch (Exception e) {
        throw new GridRuntimeException("Unable to clone instance of class: " + obj.getClass(), e);
    }
}
/**
 * Recursively clones the object, preserving reference identity: each distinct source
 * object is cloned exactly once and cycles are handled via the identity index map.
 *
 * @param identityIdxs Map of object identities ({@link System#identityHashCode(Object)})
 *      to indexes in {@code clones} parameter.
 * @param clones List of already cloned objects.
 * @param obj The object to deep-clone.
 * @param honorCloneable {@code true} if method should account {@link Cloneable} interface.
 * @return Clone of the input object.
 * @throws Exception If deep-cloning fails.
 */
@Nullable private static Object deepClone(Map<Integer, Integer> identityIdxs, List<Object> clones, @Nullable Object obj,
    boolean honorCloneable) throws Exception {
    if (obj == null)
        return null;
    // Cloneable objects are delegated to the shallow clone (i.e. their own clone()).
    if (honorCloneable && obj instanceof Cloneable)
        return shallowClone(obj);
    // Reuse an existing clone if this exact object instance was seen before.
    // NOTE(review): keying on identityHashCode alone can alias two distinct objects on a
    // (rare) hash collision — TODO confirm this is acceptable for callers.
    Integer idx = identityIdxs.get(System.identityHashCode(obj));
    Object clone = null;
    if (idx != null)
        clone = clones.get(idx);
    if (clone != null)
        return clone;
    if (obj instanceof Class)
        // No clone needed for java.lang.Class instance.
        return obj;
    Class cls = obj.getClass();
    if (cls.isArray()) {
        Class<?> arrType = cls.getComponentType();
        int len = Array.getLength(obj);
        clone = Array.newInstance(arrType, len);
        // Register the array before filling it so self-referencing arrays terminate.
        for (int i = 0; i < len; i++)
            Array.set(clone, i, deepClone(identityIdxs, clones, Array.get(obj, i), honorCloneable));
        clones.add(clone);
        identityIdxs.put(System.identityHashCode(obj), clones.size() - 1);
        return clone;
    }
    // Instantiate without invoking constructors, then copy fields reflectively.
    clone = U.forceNewInstance(cls);
    if (clone == null)
        throw new GridRuntimeException("Failed to clone object (empty constructor could not be assigned): " + obj);
    // Register the clone BEFORE copying fields so cyclic references resolve to it.
    clones.add(clone);
    identityIdxs.put(System.identityHashCode(obj), clones.size() - 1);
    // Walk the whole class hierarchy (excluding Object) to copy inherited fields too.
    for (Class<?> c = cls; c != Object.class; c = c.getSuperclass())
        for (Field f : c.getDeclaredFields())
            cloneField(identityIdxs, clones, obj, clone, f, honorCloneable);
    return clone;
}
/**
 * Copies a single field from {@code obj} into {@code clone}, deep-cloning reference
 * values and skipping static fields.
 *
 * @param identityIdxs Map of object identities to indexes in {@code clones} parameter.
 * @param clones List of already cloned objects.
 * @param obj Object to clone.
 * @param clone Clone.
 * @param f Field to clone.
 * @param honorCloneable {@code true} if method should account {@link Cloneable} interface.
 * @throws Exception If failed.
 */
private static void cloneField(Map<Integer, Integer> identityIdxs, List<Object> clones, Object obj, Object clone,
    Field f, boolean honorCloneable) throws Exception {
    // Static fields belong to the class, not the instance — nothing to copy.
    if (Modifier.isStatic(f.getModifiers()))
        return;

    boolean restore = false;

    if (!f.isAccessible()) {
        f.setAccessible(true);

        restore = true;
    }

    try {
        Object val = f.get(obj);

        // Primitives carry no references, so a direct copy is always safe.
        f.set(clone, f.getType().isPrimitive() ? val : deepClone(identityIdxs, clones, val, honorCloneable));
    }
    finally {
        if (restore)
            f.setAccessible(false);
    }
}
/**
 * Checks whether the given throwable — or anything in its {@code cause} hierarchy,
 * <b>including</b> the throwable itself — is an instance of one of the given classes.
 * Suppressed exceptions ({@link Throwable#getSuppressed()}) are searched recursively.
 *
 * @param t Throwable to check (if {@code null}, {@code false} is returned).
 * @param cls Cause classes to check (if {@code null} or empty, {@code false} is returned).
 * @return {@code True} if one of the causing exception is an instance of passed in classes,
 *      {@code false} otherwise.
 */
public static boolean hasCause(@Nullable Throwable t, @Nullable Class<? extends Throwable>... cls) {
    if (t == null || F.isEmpty(cls))
        return false;

    assert cls != null;

    Throwable cur = t;

    while (cur != null) {
        for (Class<? extends Throwable> c : cls) {
            if (c.isAssignableFrom(cur.getClass()))
                return true;
        }

        for (Throwable suppressed : cur.getSuppressed()) {
            if (hasCause(suppressed, cls))
                return true;
        }

        Throwable next = cur.getCause();

        // A self-referencing cause marks the end of the chain.
        if (next == cur)
            break;

        cur = next;
    }

    return false;
}
/**
 * Checks whether the {@code cause} hierarchy of the given throwable — <b>excluding</b>
 * the throwable itself — contains an instance of one of the given classes. The root's
 * suppressed exceptions ({@link Throwable#getSuppressed()}) are searched as well.
 *
 * @param t Throwable to check (if {@code null}, {@code false} is returned).
 * @param cls Cause classes to check (if {@code null} or empty, {@code false} is returned).
 * @return {@code True} if one of the causing exception is an instance of passed in classes,
 *      {@code false} otherwise.
 */
public static boolean hasCauseExcludeRoot(@Nullable Throwable t, @Nullable Class<? extends Throwable>... cls) {
    if (t == null || F.isEmpty(cls))
        return false;

    assert cls != null;

    Throwable cur = t.getCause();

    while (cur != null) {
        for (Class<? extends Throwable> c : cls) {
            if (c.isAssignableFrom(cur.getClass()))
                return true;
        }

        Throwable next = cur.getCause();

        // A self-referencing cause marks the end of the chain.
        if (next == cur)
            break;

        cur = next;
    }

    for (Throwable suppressed : t.getSuppressed()) {
        if (hasCause(suppressed, cls))
            return true;
    }

    return false;
}
/**
 * Returns the first throwable in the {@code cause} hierarchy (including {@code t} itself)
 * that is an instance of the given class, searching suppressed exceptions recursively.
 *
 * @param t Throwable to check (if {@code null}, {@code null} is returned).
 * @param cls Cause class to get cause (if {@code null}, {@code null} is returned).
 * @return First causing exception of passed in class, {@code null} otherwise.
 */
@SuppressWarnings({"unchecked"})
@Nullable public static <T extends Throwable> T cause(@Nullable Throwable t, @Nullable Class<T> cls) {
    if (t == null || cls == null)
        return null;

    Throwable cur = t;

    while (cur != null) {
        if (cls.isAssignableFrom(cur.getClass()))
            return (T)cur;

        for (Throwable suppressed : cur.getSuppressed()) {
            T found = cause(suppressed, cls);

            if (found != null)
                return found;
        }

        Throwable next = cur.getCause();

        // A self-referencing cause marks the end of the chain.
        if (next == cur)
            break;

        cur = next;
    }

    return null;
}
/**
 * Finds a <code>Throwable</code> for known types.
 *
 * <p>Uses <code>instanceof</code> checks to examine the exception, looking for the well
 * known types ({@link SQLException}, {@link InvocationTargetException}) that chain or
 * wrap exceptions without using the standard {@code getCause()} mechanism.</p>
 *
 * @param throwable the exception to examine
 * @return the wrapped exception, or <code>null</code> if not found
 */
private static Throwable getCauseUsingWellKnownTypes(Throwable throwable) {
    return throwable instanceof SQLException
        ? ((SQLException)throwable).getNextException()
        : throwable instanceof InvocationTargetException
            ? ((InvocationTargetException)throwable).getTargetException()
            : null;
}
/**
 * Finds a <code>Throwable</code> by reflectively invoking a no-arg accessor method.
 * Lookup and invocation failures are deliberately swallowed: this is a best-effort
 * probe over heterogeneous exception types.
 *
 * @param throwable the exception to examine
 * @param mtdName the name of the method to find and invoke
 * @return the wrapped exception, or <code>null</code> if not found
 */
private static Throwable getCauseUsingMethodName(Throwable throwable, String mtdName) {
    Method mtd = null;

    try {
        // Fix: the previous 'getMethod(mtdName, null)' passed a bare null to a varargs
        // parameter, producing an ambiguous-varargs warning; the explicit no-arg form
        // is equivalent and unambiguous.
        mtd = throwable.getClass().getMethod(mtdName);
    }
    catch (NoSuchMethodException | SecurityException ignored) {
        // exception ignored
    }

    if (mtd != null && Throwable.class.isAssignableFrom(mtd.getReturnType())) {
        try {
            // No-arg varargs invoke is equivalent to passing an empty Object[].
            return (Throwable)mtd.invoke(throwable);
        }
        catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException ignored) {
            // exception ignored
        }
    }

    return null;
}
/**
* Finds a <code>Throwable</code> by field name.
*
* @param throwable the exception to examine
* @param fieldName the name of the attribute to examine
* @return the wrapped exception, or <code>null</code> if not found
*/
private static Throwable getCauseUsingFieldName(Throwable throwable, String fieldName) {
Field field = null;
try {
field = throwable.getClass().getField(fieldName);
}
catch (NoSuchFieldException | SecurityException ignored) {
// exception ignored
}
if (field != null && Throwable.class.isAssignableFrom(field.getType())) {
try {
return (Throwable)field.get(throwable);
}
catch (IllegalAccessException | IllegalArgumentException ignored) {
// exception ignored
}
}
return null;
}
    /**
     * Checks if the Throwable class has a <code>getCause</code> method.
     *
     * This is true for JDK 1.4 and above. {@code THROWABLE_CAUSE_METHOD} is a
     * field defined elsewhere in this class — presumably resolved reflectively
     * at class initialization; it is {@code null} only on pre-1.4 runtimes.
     *
     * @return true if Throwable is nestable on this runtime.
     */
    public static boolean isThrowableNested() {
        return THROWABLE_CAUSE_METHOD != null;
    }
/**
* Checks whether this <code>Throwable</code> class can store a cause.
*
* This method does <b>not</b> check whether it actually does store a cause.
*
* @param throwable the <code>Throwable</code> to examine, may be null.
* @return boolean <code>true</code> if nested otherwise <code>false</code>.
*/
public static boolean isNestedThrowable(Throwable throwable) {
if (throwable == null)
return false;
if (throwable instanceof SQLException || throwable instanceof InvocationTargetException)
return true;
if (isThrowableNested())
return true;
Class<?> cls = throwable.getClass();
for (String CAUSE_MTD_NAME : CAUSE_MTD_NAMES) {
try {
Method mtd = cls.getMethod(CAUSE_MTD_NAME, null);
if (mtd != null && Throwable.class.isAssignableFrom(mtd.getReturnType())) {
return true;
}
}
catch (NoSuchMethodException | SecurityException ignored) {
// exception ignored
}
}
try {
Field field = cls.getField("detail");
if (field != null)
return true;
}
catch (NoSuchFieldException | SecurityException ignored) {
// exception ignored
}
return false;
}
    /**
     * Introspects the <code>Throwable</code> to obtain the cause.
     *
     * <p>The method searches for methods with specific names that return a <code>Throwable</code> object.
     * This will pick up most wrapping exceptions, including those from JDK 1.4.</p>
     *
     * <p>The default list searched for are:</p> <ul> <li><code>getCause()</code></li>
     * <li><code>getNextException()</code></li> <li><code>getTargetException()</code></li>
     * <li><code>getException()</code></li> <li><code>getSourceException()</code></li>
     * <li><code>getRootCause()</code></li> <li><code>getCausedByException()</code></li>
     * <li><code>getNested()</code></li> </ul>
     *
     * <p>In the absence of any such method, the object is inspected for a <code>detail</code>
     * field assignable to a <code>Throwable</code>.</p>
     *
     * <p>If none of the above is found, returns <code>null</code>.</p>
     *
     * @param throwable the throwable to introspect for a cause, may be null.
     * @return the cause of the <code>Throwable</code>,
     *      <code>null</code> if none found or null throwable input.
     * @see #getCause(Throwable, String[])
     */
    public static Throwable getCause(Throwable throwable) {
        return getCause(throwable, CAUSE_MTD_NAMES);
    }
/**
* Introspects the <code>Throwable</code> to obtain the cause.
*
* <ol> <li>Try known exception types.</li> <li>Try the supplied array of method names.</li> <li>Try the field
* 'detail'.</li> </ol>
*
* <p>A <code>null</code> set of method names means use the default set. A <code>null</code> in the set of method
* names will be ignored.</p>
*
* @param throwable the throwable to introspect for a cause, may be null.
* @param mtdNames the method names, null treated as default set.
* @return the cause of the <code>Throwable</code>, <code>null</code> if none found or null throwable input.
*/
public static Throwable getCause(Throwable throwable, String[] mtdNames) {
if (throwable == null)
return null;
Throwable cause = getCauseUsingWellKnownTypes(throwable);
if (cause == null) {
if (mtdNames == null)
mtdNames = CAUSE_MTD_NAMES;
for (String mtdName : mtdNames) {
if (mtdName != null) {
cause = getCauseUsingMethodName(throwable, mtdName);
if (cause != null)
break;
}
}
if (cause == null)
cause = getCauseUsingFieldName(throwable, "detail");
}
return cause;
}
/**
* Returns the list of <code>Throwable</code> objects in the exception chain.
*
* <p>A throwable without cause will return a list containing one element - the input throwable. A throwable with
* one cause will return a list containing two elements. - the input throwable and the cause throwable. A
* <code>null</code> throwable will return a list of size zero.</p>
*
* <p>This method handles recursive cause structures that might otherwise cause infinite loops. The cause chain is
* processed until the end is reached, or until the next item in the chain is already in the result set.</p>
*
* @param throwable the throwable to inspect, may be null
* @return the list of throwables, never null
*/
public static List<Throwable> getThrowableList(Throwable throwable) {
List<Throwable> list = new ArrayList<>();
while (throwable != null && !list.contains(throwable)) {
list.add(throwable);
throwable = getCause(throwable);
}
return list;
}
/**
* Returns the list of <code>Throwable</code> objects in the exception chain.
*
* A throwable without cause will return an array containing one element - the input throwable.
* A throwable with one cause will return an array containing two elements - the input throwable
* and the cause throwable. A <code>null</code> throwable will return an array of size zero.
*
* @param throwable the throwable to inspect, may be null.
* @return the array of throwables, never null.
* @see #getThrowableList(Throwable)
*/
public static Throwable[] getThrowables(Throwable throwable) {
List<Throwable> list = getThrowableList(throwable);
return list.toArray(new Throwable[list.size()]);
}
/**
* A way to get the entire nested stack-trace of an throwable.
*
* <p>The result of this method is highly dependent on the JDK version and whether the exceptions override
* printStackTrace or not.</p>
*
* @param throwable the <code>Throwable</code> to be examined
* @return the nested stack trace, with the root cause first
*/
public static String getFullStackTrace(Throwable throwable) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw, true);
Throwable[] ts = getThrowables(throwable);
for (Throwable t : ts) {
t.printStackTrace(pw);
if (isNestedThrowable(t))
break;
}
return sw.getBuffer().toString();
}
/**
* Synchronously waits for all futures in the collection.
*
* @param futs Futures to wait for.
* @throws GridException If any of the futures threw exception.
*/
public static void waitAll(@Nullable Iterable<GridFuture<?>> futs) throws GridException {
if (F.isEmpty(futs))
return;
for (GridFuture fut : futs)
fut.get();
}
/**
* Pretty-formatting for minutes.
*
* @param mins Minutes to format.
* @return Formatted presentation of minutes.
*/
public static String formatMins(long mins) {
assert mins >= 0;
if (mins == 0)
return "< 1 min";
SB sb = new SB();
long dd = mins / 1440; // 1440 mins = 60 mins * 24 hours
if (dd > 0)
sb.a(dd).a(dd == 1 ? " day " : " days ");
mins %= 1440;
long hh = mins / 60;
if (hh > 0)
sb.a(hh).a(hh == 1 ? " hour " : " hours ");
mins %= 60;
if (mins > 0)
sb.a(mins).a(mins == 1 ? " min " : " mins ");
return sb.toString().trim();
}
    /**
     * Exits with code {@code -1} if maximum memory is below 85% of the minimally
     * allowed threshold (the code compares against {@code .85 * min}; the
     * previous javadoc incorrectly said 90%).
     *
     * @param min Minimum memory threshold, in bytes.
     */
    public static void checkMinMemory(long min) {
        long maxMem = Runtime.getRuntime().maxMemory();
        if (maxMem < .85 * min) {
            // Report sizes in megabytes and abort the JVM.
            printerrln("Heap limit is too low (" + (maxMem / (1024 * 1024)) +
                "MB), please increase heap size at least up to " + (min / (1024 * 1024)) + "MB.");
            System.exit(-1);
        }
    }
/**
* Copies input byte stream to output byte stream.
*
* @param in Input byte stream.
* @param out Output byte stream.
* @param bufSize Intermediate buffer size.
* @return Number of the copied bytes.
* @throws IOException Thrown if an I/O error occurs.
*/
public static int copy(InputStream in, OutputStream out, int bufSize) throws IOException {
byte[] buf = new byte[bufSize];
int cnt = 0;
for (int n; (n = in.read(buf)) > 0;) {
out.write(buf, 0, n);
cnt += n;
}
return cnt;
}
/**
* Tries to resolve GridGain installation home folder.
*
* @return Installation home folder.
* @throws GridException If GridGain home folder was not set.
*/
public static String resolveGridGainHome() throws GridException {
String var = getSystemOrEnv("GRIDGAIN_HOME");
if (var != null)
return var;
else
throw new GridException("Failed to resolve GridGain home folder " +
"(please set 'GRIDGAIN_HOME' environment or system variable)");
}
/**
* Parses double from possibly {@code null} or invalid string.
*
* @param s String to parse double from. If string is null or invalid, a default value is used.
* @param dflt Default value for double, if parsing failed.
* @return Resulting double.
*/
public static double parseDouble(@Nullable String s, double dflt) {
try {
return s != null ? Double.parseDouble(s) : dflt;
}
catch (NumberFormatException ignored) {
return dflt;
}
}
}
|
package org.ow2.chameleon.fuchsia.mqtt.test;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import org.apache.felix.ipojo.ComponentInstance;
import org.apache.felix.ipojo.Factory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.ops4j.pax.exam.junit.PaxExam;
import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy;
import org.ops4j.pax.exam.spi.reactors.PerMethod;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;
import org.osgi.service.event.EventConstants;
import org.osgi.service.event.EventHandler;
import org.ow2.chameleon.fuchsia.core.FuchsiaConstants;
import org.ow2.chameleon.fuchsia.core.declaration.Constants;
import org.ow2.chameleon.fuchsia.core.declaration.ImportDeclaration;
import org.ow2.chameleon.fuchsia.mqtt.test.dao.MessageHandler;
import org.ow2.chameleon.fuchsia.mqtt.test.util.RabbitMQTestSuite;
import org.ow2.chameleon.fuchsia.tools.shell.FuchsiaGogoCommand;
import org.ow2.chameleon.testing.helpers.IPOJOHelper;
import javax.inject.Inject;
import java.io.IOException;
import java.util.*;
import static org.fest.assertions.Assertions.assertThat;
import static org.mockito.Mockito.*;
import static org.ow2.chameleon.fuchsia.core.ImportationLinker.FILTER_IMPORTDECLARATION_PROPERTY;
import static org.ow2.chameleon.fuchsia.core.ImportationLinker.FILTER_IMPORTERSERVICE_PROPERTY;
/**
 * Integration test verifying that AMQP messages published on a RabbitMQ broker
 * are captured by the Fuchsia AMQP importer and re-published through the OSGi
 * EventAdmin.
 *
 * @author botelho@imag.fr
 */
@RunWith(PaxExam.class)
@ExamReactorStrategy(PerMethod.class)
public class AMPQMessageCaptureTest extends RabbitMQTestSuite {

    @Inject
    EventAdmin eventAdmin;

    @Inject
    FuchsiaGogoCommand command;

    /** Fuchsia linker wiring import declarations to the importer. */
    ComponentInstance linkerComponentInstance;

    /** AMQP importer component under test. */
    ComponentInstance importerComponentInstance;

    protected IPOJOHelper ipojoHelper;

    /**
     * Creates the linker and AMQP importer components before each test.
     */
    @Before
    public void instantiateAMPQPlaform() {
        assertRabbitMQisRunning();

        ipojoHelper = new IPOJOHelper(bundleContext);

        Properties linker = new Properties();
        linker.put(FILTER_IMPORTDECLARATION_PROPERTY, "(id=*)");
        linker.put(FILTER_IMPORTERSERVICE_PROPERTY, "(instance.name=AMQPImporter)");
        linker.put(Factory.INSTANCE_NAME_PROPERTY, "MQTTLinker");
        linkerComponentInstance = ipojoHelper.createComponentInstance(
                FuchsiaConstants.DEFAULT_IMPORTATION_LINKER_FACTORY_NAME, linker);

        Properties importer = new Properties();
        importer.put(FILTER_IMPORTDECLARATION_PROPERTY, "(id=*)");
        importer.put("target", "(id=*)");
        importer.put(Factory.INSTANCE_NAME_PROPERTY, "AMQPImporter");
        importerComponentInstance = ipojoHelper.createComponentInstance("AMQPImporterFactory", importer);
    }

    /**
     * Disposes the components created in the setup.
     */
    @After
    public void uninstantiateAMPQPlatform() {
        linkerComponentInstance.dispose();
        importerComponentInstance.dispose();
    }

    @Test
    public void testLinkerImporterCreated() {
        ComponentInstance linkerInstance = ipojoHelper.getInstanceByName("MQTTLinker");
        assertThat(linkerInstance).isNotNull();
    }

    @Test
    public void testImporterCreated() {
        ComponentInstance importerInstance = ipojoHelper.getInstanceByName("AMQPImporter");
        assertThat(importerInstance).isNotNull();
    }

    /**
     * A single AMQP message must result in exactly one EventAdmin event.
     */
    @Test
    public void ConsumeSingleMessageEventAdmin() throws IOException {
        MessageHandler htmock = registerSpyEventHandler("public");
        declareQueueImport("00000000-54b3-e7c7-0000-000046bffd97", "public");

        sendSampleAMPQMessage();

        verify(htmock, times(1)).handleEvent(any(Event.class));
    }

    /**
     * Each of several AMQP messages must be delivered as its own event.
     */
    @Test
    public void ConsumeMultipleMessageEventAdmin() throws IOException {
        MessageHandler htmock = registerSpyEventHandler("public");
        declareQueueImport("00000000-54b3-e7c7-0000-000046bffd97", "public");

        final int TOTAL = 10;

        for (int counter = 0; counter < TOTAL; counter++) {
            sendSampleAMPQMessage();
        }

        verify(htmock, times(TOTAL)).handleEvent(any(Event.class));
    }

    /**
     * The event forwarded by the importer must carry the queue name as its topic.
     */
    @Test
    public void MessageSentWithRightArgument() throws IOException {
        final String queue = "public";

        MessageHandler htmock = registerSpyEventHandler(queue);
        declareQueueImport("00000000-54b3-e7c7-0000-000046bffd99", queue);

        sendSampleAMPQMessage();

        ArgumentCaptor<Event> argument = ArgumentCaptor.forClass(Event.class);
        verify(htmock, times(1)).handleEvent(argument.capture());
        assertThat(argument.getValue().getTopic()).isEqualTo(queue);
    }

    /**
     * Registers a Mockito spy around a MessageHandler as an EventAdmin handler
     * for the given topic and returns the spy for later verification.
     */
    private MessageHandler registerSpyEventHandler(String topic) {
        // Typed dictionary instead of the raw Dictionary/Hashtable used before.
        Dictionary<String, Object> handlerProperties = new Hashtable<String, Object>();
        handlerProperties.put(EventConstants.EVENT_TOPIC, topic);

        MessageHandler htmock = spy(new MessageHandler());
        bundleContext.registerService(EventHandler.class.getName(), htmock, handlerProperties);

        return htmock;
    }

    /**
     * Publishes an import declaration binding the given device id to an AMQP
     * queue and asserts the declaration was created.
     */
    private void declareQueueImport(String deviceId, String queue) {
        HashMap<String, Object> metadata = new HashMap<String, Object>();
        metadata.put(Constants.DEVICE_ID, deviceId);
        metadata.put("mqtt.queue", queue);

        ImportDeclaration declaration = createImportationDeclaration("importDeclaration", metadata);
        assertThat(declaration).isNotNull();
    }

    /**
     * Publishes a sample message directly on the RabbitMQ broker. The broker
     * connection is now closed after use (it was previously leaked).
     */
    private void sendSampleAMPQMessage() throws IOException {
        ConnectionFactory factory = new ConnectionFactory();
        Connection connection = factory.newConnection();

        try {
            Channel channel = connection.createChannel();

            final String topic = "public";
            final String quote = "The force of mind is only as great as its expression; its depth only as deep as its power to expand and lose itself";

            getLogger().info("<eventadmin type='outbound'>");
            getLogger().info("\tTOPIC: {}", topic);
            getLogger().info("\tQuote: {}", quote);
            getLogger().info("</eventadmin>\n");

            channel.basicPublish("", topic, null, quote.getBytes());
        } finally {
            connection.close();
        }
    }
}
|
package org.b3log.symphony;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
import org.eclipse.jetty.util.component.LifeCycle;
import org.eclipse.jetty.webapp.WebAppContext;
import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
/**
 * Minimal embedded Jetty launcher for running Symphony locally during development/tests.
 *
 * @author trydofor
 * @since 2017-09-25
 */
public class JettyTestServer {
    /**
     * Starts a blocking Jetty server serving {@code src/main/webapp} on the given port.
     *
     * @param port HTTP port to listen on.
     * @param useMain if {@code true}, deletes files in {@code target/test-classes} that shadow
     *        files from {@code src/main/resources} (eg symphony.properties) so the main
     *        resources take precedence.
     * @throws Exception if the server fails to start.
     */
    public void start(int port, boolean useMain) throws Exception {
        // NOTE(review): this cast only works on Java 8 and earlier — from Java 9 on the
        // application class loader is no longer a URLClassLoader; confirm the target JDK.
        URLClassLoader classLoader = (URLClassLoader) JettyTestServer.class.getClassLoader();
        String projectPath = null;
        String testClassDir = "target/test-classes";
        URL[] urlTest = classLoader.getURLs();
        // Locate the project root: the classpath entry ending in target/test-classes.
        for (URL url : urlTest) {
            String u = url.getPath();
            int p = u.lastIndexOf(testClassDir);
            if (p > 0) {
                projectPath = u.substring(0, p);
                break;
            }
        }
        // useMain : delete test resources, eg symphony.properties
        if (useMain && projectPath != null) {
            File[] mainRes = new File(projectPath + "src/main/resources").listFiles();
            if (mainRes != null) {
                File testClzDir = new File(projectPath + testClassDir);
                for (File res : mainRes) {
                    if (res.isDirectory()) continue;
                    // Remove the test-classes copy so the main resource wins on the classpath.
                    File t = new File(testClzDir, res.getName());
                    if (t.isFile()) t.delete();
                }
            }
        }
        // Fall back to the working directory when the classpath probe found nothing.
        if(projectPath == null) projectPath = "./";
        WebAppContext webapp = new WebAppContext();
        // useFileMappedBuffer=false — presumably configured in this custom webdefault.xml; verify.
        webapp.setDefaultsDescriptor(projectPath + "src/test/resources/webdefault.xml");
        webapp.setContextPath("/");
        webapp.setResourceBase(projectPath + "src/main/webapp");
        webapp.setClassLoader(classLoader);
        Server server = new Server(port);
        server.setHandler(webapp);
        // Print a startup banner once Jetty reports the lifecycle as started.
        server.addLifeCycleListener(new AbstractLifeCycle.AbstractLifeCycleListener() {
            @Override
            public void lifeCycleStarted(LifeCycle lifeCycle) {
                System.err.println("\n");
                System.err.println("================================================================");
                System.err.println("====\t\t Symphony Local is started ! PORT : " + port + "\t\t====");
                System.err.println("================================================================");
                System.err.println("\n");
            }
        });
        server.start();
        // Block the calling thread until the server stops.
        server.join();
    }
    public static void main(String[] args) throws Exception {
        JettyTestServer jettyTestServer = new JettyTestServer();
        jettyTestServer.start(8080, true);
    }
}
|
package org.starfishrespect.myconsumption.server.entities;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import java.util.Date;
/**
 * Daily aggregate of sensor readings persisted in MongoDB: min/max/average
 * values plus consumption split into day and night periods.
 */
public class DayStat {
    /** Document id assigned by the datastore. */
    @Id
    private String id;

    /** Id of the sensor this daily aggregate belongs to. */
    @Indexed
    private String sensorId;

    /** Day covered by this aggregate. */
    @Indexed
    private Date day;

    // Timestamp and value of the day's maximum reading
    // (timestamp unit not visible here — presumably epoch seconds; confirm with writers).
    private Integer maxTimestamp;
    private Integer maxValue;

    // Timestamp and value of the day's minimum reading.
    private Integer minTimestamp;
    private Integer minValue;

    // Average value over the day.
    private Integer average;

    // Consumption split into day and night periods; the total is derived in getConsumption().
    private Integer consumptionDay;
    private Integer consumptionNight;

    // No-arg constructor — presumably required by the mapping framework.
    public DayStat() {
    }

    public DayStat(String sensorId, Date day) {
        this.sensorId = sensorId;
        this.day = day;
    }

    public String getSensorId() {
        return sensorId;
    }

    public Date getDay() {
        return day;
    }

    public Integer getMaxTimestamp() {
        return maxTimestamp;
    }

    public void setMaxTimestamp(Integer maxTimestamp) {
        this.maxTimestamp = maxTimestamp;
    }

    public Integer getMaxValue() {
        return maxValue;
    }

    public void setMaxValue(Integer maxValue) {
        this.maxValue = maxValue;
    }

    public Integer getMinTimestamp() {
        return minTimestamp;
    }

    public void setMinTimestamp(Integer minTimestamp) {
        this.minTimestamp = minTimestamp;
    }

    public Integer getMinValue() {
        return minValue;
    }

    public void setMinValue(Integer minValue) {
        this.minValue = minValue;
    }

    public Integer getAverage() {
        return average;
    }

    public void setAverage(Integer average) {
        this.average = average;
    }

    /**
     * Total consumption for the day (day + night part).
     *
     * <p>Missing parts are treated as 0, so a partially populated document no
     * longer triggers a NullPointerException on unboxing.</p>
     *
     * @return sum of day and night consumption.
     */
    public Integer getConsumption() {
        int dayPart = consumptionDay == null ? 0 : consumptionDay;
        int nightPart = consumptionNight == null ? 0 : consumptionNight;

        return dayPart + nightPart;
    }

    public Integer getConsumptionDay() {
        return consumptionDay;
    }

    public void setConsumptionDay(Integer consumptionDay) {
        this.consumptionDay = consumptionDay;
    }

    public Integer getConsumptionNight() {
        return consumptionNight;
    }

    public void setConsumptionNight(Integer consumptionNight) {
        this.consumptionNight = consumptionNight;
    }
}
|
package test.com.qiniu.processing;
import com.google.gson.Gson;
import com.qiniu.common.QiniuException;
import com.qiniu.common.Zone;
import com.qiniu.processing.OperationManager;
import com.qiniu.processing.OperationStatus;
import com.qiniu.storage.Configuration;
import com.qiniu.storage.Region;
import com.qiniu.util.StringUtils;
import com.qiniu.util.UrlSafeBase64;
import org.junit.Assert;
import org.junit.Test;
import test.com.qiniu.ResCode;
import test.com.qiniu.TestConfig;
import java.util.*;
import static org.junit.Assert.fail;
/**
 * Tests the Qiniu persistent data processing (pfop) API: submits transcoding
 * jobs and polls their status via prefop.
 */
public class PfopTest {
    /**
     * Submits m3u8 and mp4 transcode jobs for the test MP4 file in every
     * configured bucket, checks that each returned job id can be queried via
     * prefop, and finally waits for the jobs to succeed.
     */
    @Test
    public void testPfop() throws QiniuException {
        Map<String, Region> bucketKeyMap = new HashMap<String, Region>();
        TestConfig.TestFile[] files = TestConfig.getTestFileArray();
        for (TestConfig.TestFile testFile : files) {
            bucketKeyMap.put(testFile.getBucketName(), testFile.getRegion());
        }
        List<String> ids = new ArrayList<>();
        Configuration cfg = new Configuration();
        OperationManager operationManager = new OperationManager(TestConfig.testAuth, cfg);
        for (Map.Entry<String, Region> entry : bucketKeyMap.entrySet()) {
            String bucket = entry.getKey();
            String notifyURL = null;
            boolean force = true;
            String m3u8SaveEntry = String.format("%s:%s", bucket, TestConfig.testMp4FileKey + "_320x240.m3u8");
            // Pass the entry directly: the previous String.format(m3u8SaveEntry) call was
            // redundant and would have failed on entries containing '%'.
            String fopM3u8 = String.format("avthumb/m3u8/segtime/10/vcodec/libx264/s/320x240|saveas/%s",
                    UrlSafeBase64.encodeToString(m3u8SaveEntry));
            String mp4SaveEntry = String.format("%s:%s", bucket, TestConfig.testMp4FileKey + "_320x240.mp4");
            String fopMp4 = String.format("avthumb/mp4/vcodec/libx264/s/320x240|saveas/%s",
                    UrlSafeBase64.encodeToString(mp4SaveEntry));
            // Chain both operations into a single pfop request.
            String fops = StringUtils.join(new String[]{fopM3u8, fopMp4}, ";");
            System.out.println(fops);
            try {
                String jobid = operationManager.pfop(bucket, TestConfig.testMp4FileKey, fops, null,
                        notifyURL, force);
                Assert.assertNotNull(jobid);
                Assert.assertNotEquals("", jobid);
                ids.add(jobid);
            } catch (QiniuException e) {
                fail(e.response.toString());
            }
        }
        System.out.println("\n\n");
        checkJobStatuses(operationManager, ids);
        System.out.println("\n\n");
        try {
            // Give the jobs some time to make progress before polling again.
            Thread.sleep(1000 * 7);
        } catch (Exception e) {
            // ignore
        }
        checkJobStatuses(operationManager, ids);
        for (String jobid : ids) {
            testPfopIsSuccess(jobid);
        }
    }

    /**
     * Queries prefop for each job id and asserts that the returned status
     * echoes the queried id.
     */
    private void checkJobStatuses(OperationManager operationManager, List<String> ids) throws QiniuException {
        for (String jobid : ids) {
            String purl = "https://api.qiniu.com/status/get/prefop?id=" + jobid;
            System.out.println(purl);
            OperationStatus status = operationManager.prefop(jobid);
            System.out.println(new Gson().toJson(status));
            Assert.assertEquals(jobid, status.id);
        }
    }

    /**
     * Polls prefop until the job reports success (code 0) or a 30 minute
     * timeout elapses, then asserts the final status.
     */
    private void testPfopIsSuccess(String jobid) {
        long maxWaitTime = 30 * 60 * 1000;
        Date startDate = new Date();
        OperationStatus status = null;
        do {
            try {
                Configuration cfg = new Configuration(Zone.autoZone());
                OperationManager operationManager = new OperationManager(TestConfig.testAuth, cfg);
                status = operationManager.prefop(jobid);
            } catch (QiniuException ex) {
                ex.printStackTrace();
                // 612 — presumably "resource does not exist"; confirm against Qiniu error codes.
                Assert.assertTrue(ResCode.find(ex.code(), ResCode.getPossibleResCode(612)));
                break;
            }
            Date currentDate = new Date();
            if (currentDate.getTime() - startDate.getTime() > maxWaitTime) {
                // Give up after the timeout; the assertions below will then fail.
                break;
            }
            try {
                Thread.sleep(500);
            } catch (InterruptedException ignored) {
            }
        } while (status == null || status.code != 0);
        Assert.assertNotNull(status);
        System.out.println(new Gson().toJson(status));
        Assert.assertEquals(0, status.code);
    }
}
|
package tests.tickets;
import org.testng.Assert;
import org.testng.annotations.Test;
import pages.HomePage;
import pages.TicketsPage;
import tests.BaseTest;
import utils.Log4Test;
public class BuyInfantTicketsTest extends BaseTest {
@Test(dataProvider = "tickets", dataProviderClass = TicketsData.class)
public void buyInfantTickets(int adults, int children, int infants) {
HomePage homePage = new HomePage(driver);
homePage.openAirTicketsPage();
TicketsPage ticketsPage = new TicketsPage(driver);
ticketsPage.buyTickets(adults, children, infants);
Assert.assertTrue(ticketsPage.isErrorMessageDisplayed(), Log4Test.error("Error pop-up is not displayed."));
}
}
|
// jTDS JDBC Driver for Microsoft SQL Server and Sybase
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package net.sourceforge.jtds.jdbc;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
* @author
* Holger Rehn
*/
public class StatementTest extends TestBase
{
    /**
     * Constructs a test case with the given name (standard JUnit 3 pattern).
     *
     * @param name name of the test method to run
     */
    public StatementTest( String name )
    {
        super( name );
    }
    /**
     * Test for bug #544, getMoreResults() does not work with insert triggers.
     *
     * Executes an insert that fires a trigger (which performs another insert)
     * followed by a select, and checks that both update counts and the result
     * set are surfaced in the expected order.
     */
    public void testBug544()
       throws Exception
    {
        // permanent objects are required for the trigger, clean up leftovers
        dropTrigger( "Bug544T" );
        dropTable( "Bug544a" );
        dropTable( "Bug544b" );
        Statement sta = con.createStatement();
        sta.executeUpdate( "create table Bug544a(A int, B int identity not null)" );
        sta.executeUpdate( "create table Bug544b(A int)" );
        // insert a row to bump up the identity value
        sta.execute( "insert into Bug544a values( 9 )" );
        // create insert trigger
        sta.executeUpdate( "create trigger Bug544T on Bug544a for insert as begin insert into Bug544b values (12) end" );
        // insert data to fire the trigger
        sta.execute( "insert into Bug544a values( 1 ) select @@identity" );
        // dumpAll( sta );
        // check update counts
        assertEquals( 1, sta.getUpdateCount() ); // original insert
        assertFalse( sta.getMoreResults() );
        assertEquals( 1, sta.getUpdateCount() ); // insert executed by the trigger
        assertTrue( sta.getMoreResults() );
        ResultSet res = sta.getResultSet(); // result of "select @@identity"
        assertTrue( res.next() );
        assertEquals( 2, res.getInt( 1 ) ); // the generated value
        assertFalse( res.next() );
        // check the target table
        res = sta.executeQuery( "select * from Bug544b" );
        assertTrue( res.next() );
        assertEquals( 12, res.getInt( 1 ) );
        assertFalse( res.next() );
    }
/**
* Test for bug #500, Statement.execute() raises executeQuery() exception if
* using cursors (useCursors=true) and SHOWPLAN_ALL is set to ON.
*/
public void testBug500()
throws Exception
{
Properties override = new Properties();
override.put( "useCursors", "true" );
Connection connection = getConnection( override );
Statement stmt = connection.createStatement();
stmt.executeUpdate( "create table #Bug500 (A int)" );
for( int i = 0; i < 10; i ++ )
{
stmt.executeUpdate( "insert into #Bug500 values(" + i + ")" );
}
stmt.executeUpdate( "set SHOWPLAN_ALL on" );
// or stmt.execute( "set SHOWPLAN_ALL on" ); - doesn't matters
stmt.execute( "select top 5 * from #Bug500" );
dumpAll( stmt );
// stmt.execute( "select top 5 * from #Bug500" );
// ResultSet rs = stmt.getResultSet();
}
    /**
     * Regression test for bug #528, ResultSet not getting populated correctly
     * with autogenerated keys.
     *
     * Runs a batch of two inserts plus a select and verifies update counts,
     * result set contents and the generated keys result set.
     */
    public void testBug528()
       throws Exception
    {
        Statement sta = con.createStatement();
        sta.executeUpdate( "create table #Bug528 (A int identity(11,1) not null, B varchar(10))" );
        boolean result = sta.execute( "insert into #Bug528(B) values ('test');" +  // Update Count: 1
                                      "insert into #Bug528(B) values ('test');" +  // Update Count: 1
                                      "select * from #Bug528",                     // ResultSet: [11, test] [12, test]
                                      Statement.RETURN_GENERATED_KEYS );           // Generated Keys: 12
        // dumpAll( sta );
        assertFalse( result ); // first is an update count for 1st insert
        assertEquals( 1, sta.getUpdateCount() ); // check update count
        assertFalse( sta.getMoreResults() ); // next is an update count for 2nd insert
        assertEquals( 1, sta.getUpdateCount() ); // check update count
        assertTrue( sta.getMoreResults() ); // resultset generated by select (this used to fail)
        // get and check resultset
        ResultSet res = sta.getResultSet();
        assertTrue( res.next() );
        assertEquals( 11, res.getInt( 1 ) );
        assertEquals( "test", res.getString( 2 ) );
        assertTrue( res.next() );
        assertEquals( 12, res.getInt( 1 ) );
        assertEquals( "test", res.getString( 2 ) );
        assertFalse( res.next() );
        // now check generated keys
        res = sta.getGeneratedKeys();
        // FIXME: the driver is not yet able to return generated keys for anything but the last update
        // assertTrue( res.next() );
        // assertEquals( 11, res.getInt( 1 ) );
        assertTrue( res.next() );
        assertEquals( 12, res.getInt( 1 ) );
        assertFalse( res.next() );
        sta.close();
    }
/**
* Test for bug #559, unique constraint violation error hidden by an internal
* jTDS error.
*/
public void testBug559()
throws Exception
{
Statement st = con.createStatement();
st.executeUpdate( "create table #Bug559 (A int, unique (A))" );
try
{
st.executeUpdate( "select 1;insert into #Bug559 values( 1 );insert into #Bug559 values( 1 )" );
fail();
}
catch( SQLException e )
{
// expected, executeUpdate() cannot return a resultset
assertTrue( e.getMessage().toLowerCase().contains( "executeupdate" ) );
}
st.close();
}
    /**
     * Test for bug #609, slow finalization in {@link SharedSocket#closeStream()}
     * can block JVM finalizer thread or cause OOM errors.
     *
     * The test first consumes almost the entire heap, then churns through a
     * large number of statements (whose resources have to be released) while
     * background threads keep the connection busy, and asserts that the
     * statement churn completes within a time bound and without an OOM.
     */
    public void testBug609()
       throws Exception
    {
        final int STATEMENTS = 50000;
        final int THREADS = 10;
        final Connection connection = con;
        // shared flag used to stop the background threads at the end
        final boolean[] running = new boolean[] { true };
        // fill the heap almost completely with 32 MB chunks to provoke tight-memory behavior
        List block = new ArrayList( 1000 );
        try
        {
            while( true )
            {
                block.add( new byte[32*1024*1024] );
                System.gc();
            }
        }
        catch( OutOfMemoryError oome )
        {
            // free one chunk to leave a little headroom
            block.remove( block.size() - 1 );
        }
        System.gc();
        System.out.println( "free memory: " + Runtime.getRuntime().freeMemory() / 1024 / 1024 + " MB" );
        Statement sta = connection.createStatement();
        sta.executeUpdate( "create table #bug609( A int primary key, B varchar(max) )" );
        sta.close();
        Thread[] threads = new Thread[THREADS];
        // start threads that keeps sending data to block VirtualSocket table in SharedSocket as much as possible
        for( int t = 0; t < threads.length; t ++ )
        {
            final int i = t;
            threads[t] = new Thread()
            {
                public void run()
                {
                    try
                    {
                        Statement sta = connection.createStatement();
                        sta.executeUpdate( "insert into #bug609 values( " + i + ", 'nix' )" );
                        // build a > 64 kB value to keep each update busy on the wire
                        String value = "BIGVAL";
                        while( value.length() < 64 * 1024 )
                        {
                            value += value + "BIGVAL";
                        }
                        String sql = "update #bug609 set B = '" + value + "' where A = " + i;
                        while( running[0] )
                        {
                            sta.executeUpdate( sql );
                        }
                        sta.close();
                    }
                    catch( SQLException s )
                    {
                        // test stopped, connection is closed
                    }
                    catch( Throwable t )
                    {
                        t.printStackTrace();
                    }
                }
            };
            threads[t].setPriority( Thread.MIN_PRIORITY );
            threads[t].start();
        }
        int stats = 0;
        long start = System.currentTimeMillis();
        try
        {
            // buffer some statements that can later be closed together, otherwise
            // the connection's TdsCore cache would prevent the TdsCore from being
            // closed (and SharedSocket.closeStream to be called) most of the time
            Statement[] buffered = new Statement[2500];
            for( ; stats < STATEMENTS; stats ++ )
            {
                int r = stats % buffered.length;
                buffered[r] = con.createStatement();
                if( r == buffered.length - 1 )
                {
                    // drop all references so the buffered statements become collectable
                    for( int c = 0; c < buffered.length; c ++ )
                    {
                        buffered[c] = null;
                    }
                    System.out.println( stats + 1 );
                }
            }
        }
        catch( OutOfMemoryError oome )
        {
            // release the heap filler before asserting so fail() itself can allocate
            block = null;
            System.gc();
            fail( "OOM after " + (System.currentTimeMillis() - start) + " ms, " + stats + " statements created successfully" );
        }
        long elapsed = System.currentTimeMillis() - start;
        System.out.println( "time: " + elapsed + " ms" );
        // the statement churn must not be slowed down by finalization
        assertTrue( elapsed < 10000 );
        // stop threads
        running[0] = false;
        for( int t = 0; t < threads.length; t ++ )
        {
            threads[t].join();
        }
    }
/**
 * Regression test for bug #473: Statement.setMaxRows() must only limit
 * SELECT results and not affect INSERT, UPDATE, DELETE or SELECT INTO.
 */
public void testBug473()
   throws Exception
{
   Statement stmt = con.createStatement();

   // create test table and fill with data
   stmt.executeUpdate( "create table #Bug473( X int )" );
   stmt.executeUpdate( "insert into #Bug473 values( 1 )" );
   stmt.executeUpdate( "insert into #Bug473 values( 2 )" );

   // copy all data (maxRows shouldn't have any effect on SELECT INTO)
   stmt.setMaxRows( 1 );
   stmt.executeUpdate( "select * into #copy from #Bug473" );

   // ensure all table data has been copied
   stmt.setMaxRows( 0 );
   ResultSet rs = stmt.executeQuery( "select * from #copy" );
   int copied = 0;
   while( rs.next() )
   {
      copied ++;
   }
   assertEquals( 2, copied );
   rs.close();
   stmt.close();
}
/**
 * Test for bug #635: selecting from a view with an ORDER BY clause
 * returned incorrect results when Statement.setMaxRows() was in effect.
 */
public void testBug635()
   throws Exception
{
   final int[] data = new int[] { 1, 3, 5, 7, 9, 2, 4, 6, 8, 10 };

   // remove leftovers from previous runs (permanent objects are required
   // here, a view cannot be created on a temporary table)
   dropTable( "Bug635T" );
   dropView ( "Bug635V" );

   Statement stmt = con.createStatement();
   stmt.setMaxRows( 7 );
   stmt.executeUpdate( "create table Bug635T( X int )" );
   stmt.executeUpdate( "create view Bug635V as select * from Bug635T" );

   for( int value : data )
   {
      stmt.executeUpdate( "insert into Bug635T values( " + value + " )" );
   }

   // maxRows == 7, so exactly the values 1 .. 7 are expected, in order
   ResultSet rs = stmt.executeQuery( "select X from Bug635V order by X" );
   for( int expected = 1; expected <= 7; expected ++ )
   {
      assertTrue( rs.next() );
      assertEquals( expected, rs.getInt( 1 ) );
   }
   rs.close();
   stmt.close();
}
/**
 * Test for bug #624, full text search causes connection reset when connected
 * to Microsoft SQL Server 2008.
 *
 * <p> Creates a database with a fulltext catalog and index, waits for the
 * index population to finish and then runs a CONTAINS query through a
 * {@link PreparedStatement}. </p>
 */
// TODO: test CONTAINSTABLE, FREETEXT, FREETEXTTABLE
public void testFullTextSearch()
   throws Exception
{
   // cleanup
   dropTable( "Bug624" );
   dropDatabase( "Bug624DB" );

   // create DB
   Statement stmt = con.createStatement();
   stmt.executeUpdate( "create database Bug624DB" );
   stmt.executeUpdate( "use Bug624DB" );

   // create table and fulltext index
   stmt.executeUpdate( "create fulltext catalog FTS_C as default" );
   stmt.executeUpdate( "create table Bug624 ( ID int primary key, A varchar( 100 ) )" );

   // the fulltext index needs the name of the (auto-generated) PK index
   ResultSet res = stmt.executeQuery( "select name from sysindexes where object_id( 'Bug624' ) = id" );
   assertTrue( res.next() );
   String pk = res.getString( 1 );
   assertFalse( res.next() );
   res.close();
   stmt.executeUpdate( "create fulltext index on Bug624( A ) key index " + pk );

   // insert test data
   assertEquals( 1, stmt.executeUpdate( "insert into Bug624 values( 0, 'Strange Axolotl, that!' )" ) );

   // wait for the index to be built (PopulateStatus 0 means idle)
   for( boolean indexed = false; ! indexed; )
   {
      res = stmt.executeQuery( "select FULLTEXTCATALOGPROPERTY( 'FTS_C', 'PopulateStatus' )" );
      assertTrue( res.next() );
      indexed = res.getInt( 1 ) == 0;
      res.close();
      Thread.sleep( 10 );
   }

   // query table using CONTAINS
   PreparedStatement ps = con.prepareStatement( "select * from Bug624 where contains( A, ? )" );
   ps.setString( 1, "Axolotl" );
   res = ps.executeQuery();
   assertTrue( res.next() );
   assertEquals( 0, res.getInt( 1 ) );
   assertEquals( "Strange Axolotl, that!", res.getString( 2 ) );

   // fixed: the result set and both statements were leaked before
   res.close();
   ps.close();
   stmt.close();
}
/**
 * Test for computed results, bug #678.
 *
 * <p> A SELECT with a COMPUTE ... BY clause returns an alternating sequence
 * of result sets: each group of detail rows is followed by a single-row
 * result set containing the aggregates computed for that group. </p>
 */
public void testComputeClause()
throws Exception
{
final int VALUES = 150;
Statement sta = con.createStatement();
sta.executeUpdate( "create table #Bug678( X int, A varchar(10), B int, C bigint )" );
// X clusters the rows into groups of shrinking size; A, B and C carry
// varchar/int/bigint payload so all aggregate types are exercised
for( int i = 0; i < VALUES; i ++ )
{
sta.executeUpdate( "insert into #Bug678 values( " + i % Math.max( 1, i / 20 ) + ", 'VAL" + i + "'," + ( VALUES - i ) + ", " + (long)i * Integer.MAX_VALUE + " )" );
}
assertTrue( sta.execute( "select * from #Bug678 order by X, A asc compute min( A ), max( A ), min( C ), max( C ), avg( B ), sum( B ), count( A ), count_big( C ) by X" ) );
// expected result groups, each followed by a computed result
int[] expected = new int[] { 72, 32, 20, 13, 8, 4, 1 };
for( int i = 0; i < expected.length; i ++ )
{
ResultSet res = sta.getResultSet();
// consume rows
for( int r = 0; r < expected[i]; r ++ )
{
assertTrue( res.next() );
}
assertFalse( res.next() );
res.close();
// consume computed result
assertTrue( sta.getMoreResults() );
res = sta.getResultSet();
assertTrue( res.next() );
// column 7 is count( A ), which must equal the size of the detail group
assertEquals( expected[i], res.getInt( 7 ) );
assertFalse( res.next() );
res.close();
// move to next result if any
assertEquals( i == expected.length -1 ? false : true, sta.getMoreResults() );
}
// no update count expected for MSSQL, Sybase seems to sum up the inserts and computed rows to a total of 157
assertEquals( isMSSQL() ? -1 : 157, sta.getUpdateCount() );
sta.close();
}
/**
 * <p> Ensures that the single-row results generated by aggregation
 * operations (COMPUTE clause) can be closed individually without affecting
 * the remaining {@link ResultSet}s. </p>
 */
public void testCloseComputedResult()
   throws Exception
{
   Statement stmt = con.createStatement();

   stmt.executeUpdate( "create table #Bug678( NAME varchar(10), CREDITS int )" );
   stmt.executeUpdate( "insert into #Bug678 values( 'Alf' , 10 )" );
   stmt.executeUpdate( "insert into #Bug678 values( 'Alf' , 20 )" );
   stmt.executeUpdate( "insert into #Bug678 values( 'Alf' , 30 )" );
   stmt.executeUpdate( "insert into #Bug678 values( 'Ronny', 5 )" );
   stmt.executeUpdate( "insert into #Bug678 values( 'Ronny', 10 )" );

   // 4 result sets expected: Alf rows, Alf sum, Ronny rows, Ronny sum
   assertTrue( stmt.execute( "select * from #Bug678 order by NAME compute sum( CREDITS ) by NAME" ) );

   // 1st result set: verify the first row, advance once, then close early
   ResultSet rs = stmt.getResultSet();
   assertTrue ( rs.next() );
   assertEquals( "Alf", rs.getString( 1 ) );
   assertEquals( 10, rs.getInt( 2 ) );
   assertTrue ( rs.next() );
   rs.close();

   // 2nd (computed) result set: close without reading anything at all
   assertTrue( stmt.getMoreResults() );
   stmt.getResultSet().close();

   // 3rd result set: verify its first row, then close
   assertTrue( stmt.getMoreResults() );
   rs = stmt.getResultSet();
   assertTrue( rs.next() );
   assertEquals( "Ronny", rs.getString( 1 ) );
   assertEquals( 5, rs.getInt( 2 ) );
   rs.close();

   // 4th (computed) result set: must still deliver Ronny's credit sum
   assertTrue( stmt.getMoreResults() );
   rs = stmt.getResultSet();
   assertTrue( rs.next() );
   assertEquals( 15, rs.getInt( 1 ) );
   assertFalse( rs.next() );

   // no result sets left
   assertFalse( stmt.getMoreResults() );
   stmt.close();
}
/**
 * <p> Stress test: several threads concurrently close the same set of
 * statements (each holding open result sets). Closing an already closed
 * statement is a no-op, so no thread may observe an error. </p>
 */
public void testConcurrentClose()
   throws Exception
{
   final int THREADS     = 10;
   final int STATEMENTS  = 200;
   final int RESULTSETS  = 100;

   // collected errors, guarded by synchronization on the list itself
   // (fixed: was a raw List before)
   final List<Exception> errors = new ArrayList<>();

   final Statement[] stm = new Statement[STATEMENTS];
   final ResultSet[] res = new ResultSet[STATEMENTS * RESULTSETS];

   Connection con = getConnection();
   for( int i = 0; i < STATEMENTS; i ++ )
   {
      stm[i] = con.createStatement();
      for( int r = 0; r < RESULTSETS; r ++ )
      {
         res[i * RESULTSETS + r] = stm[i].executeQuery( "select 1" );
      }
   }

   Thread[] threads = new Thread[THREADS];
   for( int i = 0; i < THREADS; i ++ )
   {
      threads[i] = new Thread( "closer " + i )
      {
         public void run()
         {
            try
            {
               for( int i = 0; i < STATEMENTS; i ++ )
               {
                  stm[i].close();
               }
            }
            catch( Exception e )
            {
               synchronized( errors )
               {
                  errors.add( e );
               }
            }
         }
      };
   }

   for( int i = 0; i < THREADS; i ++ )
   {
      threads[i].start();
   }
   for( int i = 0; i < THREADS; i ++ )
   {
      threads[i].join();
   }

   for( Exception error : errors )
   {
      error.printStackTrace();
   }
   assertTrue( errors.toString(), errors.isEmpty() );

   // fixed: the extra connection was leaked before
   con.close();
}
/**
* Regression test for bug #677, deadlock in {@link JtdsStatement#close()}.
*/
public void testCloseDeadlock()
throws Exception
{
final int THREADS = 100;
final int STATEMENTS = 1000;
final List errors = new ArrayList<>();
Thread[] threads = new Thread[THREADS];
for( int i = 0; i < THREADS; i ++ )
{
threads[i] = new Thread( "deadlock " + i )
{
public void run()
{
try
{
Connection con = getConnection();
final Statement[] stm = new Statement[STATEMENTS];
for( int i = 0; i < STATEMENTS; i ++ )
{
stm[i] = con.createStatement();
}
new Thread( Thread.currentThread().getName() + " (closer)" )
{
public void run()
{
try
{
for( int i = 0; i < STATEMENTS; i ++ )
{
stm[i].close();
}
}
catch( SQLException e )
{
// statements might already be closed by closing the connection
if( ! "HY010".equals( e.getSQLState() ) )
{
synchronized( errors )
{
errors.add( e );
}
}
}
}
}.start();
Thread.sleep( 1 );
con.close();
}
catch( Exception e )
{
synchronized( errors )
{
errors.add( e );
}
}
}
};
}
for( int i = 0; i < THREADS; i ++ )
{
threads[i].start();
}
System.currentTimeMillis();
int running = THREADS;
while( running != 0 )
{
Thread.sleep( 2500 );
int last = running;
running = THREADS;
for( int i = 0; i < THREADS; i ++ )
{
if( threads[i].getState() == Thread.State.TERMINATED )
{
running
}
}
if( running == last )
{
// for( int i = 0; i < THREADS; i ++ )
// if( threads[i].getState() != Thread.State.TERMINATED )
// Exception e = new Exception();
// e.setStackTrace( threads[i].getStackTrace() );
// e.printStackTrace();
fail( "deadlock detected, none of the remaining connections closed within 2500 ms" );
}
}
// for( int i = 0; i < errors.size(); i ++ )
// ( (Exception) errors.get( i ) ).printStackTrace();
assertTrue( errors.toString(), errors.isEmpty() );
}
/**
 * Test for #676, error in multi line comment handling.
 */
public void testMultiLineComment()
   throws Exception
{
   Statement stmt = con.createStatement();

   // jTDS has to strip the comment correctly despite the quote and special
   // characters embedded in it
   stmt.executeUpdate( "create table /*/ comment '\"?@[*-} /**/*/ #Bug676a (A int) /* */" );

   try
   {
      // SQL server stacks, instead of ignoring 'inner comments'
      stmt.executeUpdate( "create table #Bug676b (A int)" );
   }
   catch( SQLException e )
   {
      // thrown by jTDS due to unclosed 'inner comment'
      assertEquals( String.valueOf( 22025 ), e.getSQLState() );
   }

   stmt.close();
}
/**
 * Test for bug #669, no error if violating unique constraint in update.
 */
public void testDuplicateKey()
   throws Exception
{
   Statement stmt = con.createStatement();
   stmt.executeUpdate( "create table #Bug669 (A int, unique (A))" );
   stmt.executeUpdate( "insert into #Bug669 values( 1 )" );

   // both executeUpdate() and execute() have to report the violation
   for( int pass = 0; pass < 2; pass ++ )
   {
      try
      {
         if( pass == 0 )
         {
            stmt.executeUpdate( "insert into #Bug669 values( 1 )" );
         }
         else
         {
            stmt.execute( "insert into #Bug669 values( 1 )" );
         }
         fail();
      }
      catch( SQLException e )
      {
         // expected, unique constraint violation
      }
   }

   stmt.close();
}
/**
 * <p> Test for bug [1694194], queryTimeout does not work on MSSQL2005 when
 * property 'useCursors' is set to 'true'. Furthermore, the test also checks
 * timeout with a query that cannot use a cursor. </p>
 *
 * <p> This test requires property 'queryTimeout' to be set to true. </p>
 */
public void testQueryTimeout() throws Exception
{
Statement st = con.createStatement();
// every statement executed below has to be aborted after 1 second
st.setQueryTimeout( 1 );
// procedure that blocks for 30 seconds, far beyond the timeout
st.execute( "create procedure #testTimeout as begin waitfor delay '00:00:30'; select 1; end" );
long start = System.currentTimeMillis();
try
{
// this query doesn't use a cursor
st.executeQuery( "exec #testTimeout" );
fail( "query did not time out" );
}
catch( SQLException e )
{
// HYT00: timeout expired
assertEquals( "HYT00", e.getSQLState() );
// the timeout has to strike after roughly 1000 ms (50 ms tolerance)
assertEquals( 1000, System.currentTimeMillis() - start, 50 );
}
st.execute( "create table #dummy1(A varchar(200))" );
st.execute( "create table #dummy2(B varchar(200))" );
st.execute( "create table #dummy3(C varchar(200))" );
// create test data
con.setAutoCommit( false );
for( int i = 0; i < 100; i++ )
{
st.execute( "insert into #dummy1 values('" + i + "')" );
st.execute( "insert into #dummy2 values('" + i + "')" );
st.execute( "insert into #dummy3 values('" + i + "')" );
}
con.commit();
con.setAutoCommit( true );
start = System.currentTimeMillis();
try
{
// this query can use a cursor; the unindexed cross join with mixed sort
// directions is expensive enough to exceed the 1 second timeout
st.executeQuery( "select * from #dummy1, #dummy2, #dummy3 order by A desc, B asc, C desc" );
fail( "query did not time out" );
}
catch( SQLException e )
{
// HYT00: timeout expired
assertEquals( "HYT00", e.getSQLState() );
// wider (100 ms) tolerance for the cursor-based case
assertEquals( 1000, System.currentTimeMillis() - start, 100 );
}
st.close();
}
}
|
package org.jdesktop.swingx;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.util.regex.Pattern;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JTable;
import javax.swing.JTree;
import javax.swing.ToolTipManager;
import javax.swing.table.TableCellRenderer;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeCellRenderer;
import javax.swing.tree.TreeCellRenderer;
import javax.swing.tree.TreePath;
import org.jdesktop.swingx.decorator.AlternateRowHighlighter;
import org.jdesktop.swingx.decorator.ComponentAdapter;
import org.jdesktop.swingx.decorator.ConditionalHighlighter;
import org.jdesktop.swingx.decorator.Filter;
import org.jdesktop.swingx.decorator.FilterPipeline;
import org.jdesktop.swingx.decorator.HierarchicalColumnHighlighter;
import org.jdesktop.swingx.decorator.Highlighter;
import org.jdesktop.swingx.decorator.HighlighterPipeline;
import org.jdesktop.swingx.decorator.PatternFilter;
import org.jdesktop.swingx.decorator.PatternHighlighter;
import org.jdesktop.swingx.decorator.ShuttleSorter;
import org.jdesktop.swingx.treetable.DefaultTreeTableModel;
import org.jdesktop.swingx.treetable.FileSystemModel;
import org.jdesktop.swingx.treetable.TreeTableModel;
// import de.kleopatra.view.LFSwitcher;
/**
 * Unit tests and interactive (visual) tests for {@link JXTreeTable}.
 *
 * <p> Methods named {@code test*} are plain assertions; methods named
 * {@code interactiveTest*} open frames for manual inspection and are only
 * run via {@link #main(String[])}. </p>
 */
public class JXTreeTableUnitTest extends InteractiveTestCase {

    // shared model for most tests, re-created in setUp()
    private TreeTableModel treeTableModel;

    public JXTreeTableUnitTest() {
        super("JXTreeTable Unit Test");
    }

    public void testRowForPath() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        // @todo - make sure we find an expandible row instead of hardcoding
        int row = 5;
        TreePath path = treeTable.getPathForRow(row);
        assertEquals("original row must be retrieved", row, treeTable.getRowForPath(path));
        int rowCount = treeTable.getRowCount();
        treeTable.expandRow(row - 1);
        // sanity assert
        assertTrue("really expanded", treeTable.getRowCount() > rowCount);
        TreePath expanded = treeTable.getPathForRow(row);
        assertNotSame("path at original row must be different when expanded", path, expanded);
        assertEquals("original row must be retrieved", row, treeTable.getRowForPath(expanded));
    }

    public void testPathForRowContract() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        assertNull("row < 0 must return null path", treeTable.getPathForRow(-1));
        assertNull("row >= getRowCount must return null path", treeTable.getPathForRow(treeTable.getRowCount()));
    }

    public void testTableRowAtNegativePoint() {
        JXTable treeTable = new JXTable(1, 4);
        int negativeYRowHeight = - treeTable.getRowHeight();
        int negativeYRowHeightPlusOne = negativeYRowHeight + 1;
        int negativeYMinimal = -1;
        assertEquals("negative y location rowheight " + negativeYRowHeight + " must return row -1",
                -1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeight)));
        assertEquals("negative y location " + negativeYRowHeightPlusOne +" must return row -1",
                -1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeightPlusOne)));
        assertEquals("minimal negative y location must return row -1",
                -1, treeTable.rowAtPoint(new Point(-1, negativeYMinimal)));
    }

    public void testTableRowAtOutsidePoint() {
        JTable treeTable = new JTable(2, 4);
        int negativeYRowHeight = (treeTable.getRowHeight()+ treeTable.getRowMargin()) * treeTable.getRowCount() ;
        int negativeYRowHeightPlusOne = negativeYRowHeight - 1;
        int negativeYMinimal = -1;
        assertEquals("negative y location rowheight " + negativeYRowHeight + " must return row -1",
                -1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeight)));
        assertEquals("negative y location " + negativeYRowHeightPlusOne +" must return row -1",
                -1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeightPlusOne)));
        // assertEquals("minimal negative y location must return row -1",
        //         -1, treeTable.rowAtPoint(new Point(-1, negativeYMinimal)));
    }

    public void testPathForLocationContract() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        // this is actually a JTable rowAtPoint bug: falsely calculates
        // row == 0 if - 1 >= y > - getRowHeight()
        //assertEquals("location outside must return null path", null, treeTable.getPathForLocation(-1, -(treeTable.getRowHeight() - 1)));
        int negativeYRowHeight = - treeTable.getRowHeight();
        int negativeYRowHeightPlusOne = negativeYRowHeight + 1;
        int negativeYMinimal = -1;
        assertEquals("negative y location rowheight " + negativeYRowHeight + " must return row -1",
                -1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeight)));
        assertEquals("negative y location " + negativeYRowHeightPlusOne +" must return row -1",
                -1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeightPlusOne)));
        assertEquals("minimal negative y location must return row -1",
                -1, treeTable.rowAtPoint(new Point(-1, negativeYMinimal)));
    }

    /**
     * Issue #151: renderer properties ignored after setting treeTableModel.
     *
     */
    public void testRendererProperties() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        // storing negates of properties
        boolean expandsSelected = !treeTable.getExpandsSelectedPaths();
        boolean scrollsOnExpand = !treeTable.getScrollsOnExpand();
        boolean showRootHandles = !treeTable.getShowsRootHandles();
        boolean rootVisible = !treeTable.isRootVisible();
        // setting negates properties
        treeTable.setExpandsSelectedPaths(expandsSelected);
        treeTable.setScrollsOnExpand(scrollsOnExpand);
        treeTable.setShowsRootHandles(showRootHandles);
        treeTable.setRootVisible(rootVisible);
        // assert negates are set - sanity assert
        assertEquals("expand selected", expandsSelected, treeTable
                .getExpandsSelectedPaths());
        assertEquals("scrolls expand", scrollsOnExpand, treeTable
                .getScrollsOnExpand());
        assertEquals("shows handles", showRootHandles, treeTable
                .getShowsRootHandles());
        assertEquals("root visible", rootVisible, treeTable.isRootVisible());
        // setting a new model - the properties must survive the model change
        treeTable.setTreeTableModel(new DefaultTreeTableModel());
        // assert negates are set
        assertEquals("expand selected", expandsSelected, treeTable
                .getExpandsSelectedPaths());
        assertEquals("scrolls expand", scrollsOnExpand, treeTable
                .getScrollsOnExpand());
        assertEquals("shows handles", showRootHandles, treeTable
                .getShowsRootHandles());
        assertEquals("root visible", rootVisible, treeTable.isRootVisible());
    }

    /**
     * Issue #148: line style client property not respected by renderer.
     *
     */
    public void testLineStyle() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        String propertyName = "JTree.lineStyle";
        treeTable.putClientProperty(propertyName, "Horizontal");
        JXTree renderer = (JXTree) treeTable.getCellRenderer(0, 0);
        assertEquals(propertyName + " set on renderer", "Horizontal", renderer
                .getClientProperty(propertyName));
    }

    /**
     * sanity test: arbitrary client properties not passed to renderer.
     *
     */
    public void testArbitraryClientProperty() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        String propertyName = "someproperty";
        treeTable.putClientProperty(propertyName, "Horizontal");
        JXTree renderer = (JXTree) treeTable.getCellRenderer(0, 0);
        assertNull(propertyName + " not set on renderer", renderer
                .getClientProperty(propertyName));
    }

    public void interactiveTestFocusedCellBackground() {
        JXTreeTable xtable = new JXTreeTable(treeTableModel);
        xtable.setBackground(new Color(0xF5, 0xFF, 0xF5)); // ledger
        JFrame frame = wrapWithScrollingInFrame(xtable, "Unselected focuse background");
        frame.setVisible(true);
    }

    /**
     * Issue #226: no per-cell tooltips in TreeColumn.
     */
    public void interactiveTestToolTips() {
        JXTreeTable tree = new JXTreeTable(treeTableModel);
        // JW: don't use this idiom - Stackoverflow...
        // multiple delegation - need to solve or discourage
        tree.setTreeCellRenderer(createRenderer());
        tree.setDefaultRenderer(Object.class, createTableRenderer(tree.getDefaultRenderer(Object.class)));
        JFrame frame = wrapWithScrollingInFrame(tree, "tooltips");
        frame.setVisible(true); // RG: Changed from deprecated method show();
    }

    /** Wraps the given renderer so every cell gets its value as tooltip. */
    private TableCellRenderer createTableRenderer(final TableCellRenderer delegate) {
        TableCellRenderer l = new TableCellRenderer() {
            public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
                Component result = delegate.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
                ((JComponent) result).setToolTipText(String.valueOf(value));
                return result;
            }
        };
        return l;
    }

    /** Creates a tree renderer that shows the row's path as tooltip. */
    private TreeCellRenderer createRenderer() {
        final TreeCellRenderer delegate = new DefaultTreeCellRenderer();
        TreeCellRenderer renderer = new TreeCellRenderer() {
            public Component getTreeCellRendererComponent(JTree tree, Object value,
                    boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
                Component result = delegate.getTreeCellRendererComponent(tree, value,
                        selected, expanded, leaf, row, hasFocus);
                ((JComponent) result).setToolTipText(String.valueOf(tree.getPathForRow(row)));
                return result;
            }
        };
        return renderer;
    }

    /**
     * reported: boolean not showing - not reproducible
     *
     */
    public void interactiveTestBooleanRenderer() {
        final JXTreeTable treeTable = new JXTreeTable(new MyTreeTableModel());
        treeTable.setRootVisible(true);
        JFrame frame = wrapWithScrollingInFrame(treeTable, "boolean renderers");
        frame.setVisible(true);
    }

    /**
     * Minimal model with a String column and a Boolean column, used by
     * {@link #interactiveTestBooleanRenderer()}.
     */
    private class MyTreeTableModel extends DefaultTreeTableModel {

        public MyTreeTableModel() {
            final DefaultMutableTreeNode root =
                new DefaultMutableTreeNode("Root");
            root.add(new DefaultMutableTreeNode("A"));
            root.add(new DefaultMutableTreeNode("B"));
            this.setRoot(root);
        }

        public int getColumnCount() {
            return 2;
        }

        public Class getColumnClass(int column) {
            if (column == 1) {
                return Boolean.class;
            }
            return super.getColumnClass(column);
        }

        public boolean isCellEditable(int row, int column) {
            return true;
        }

        public boolean isCellEditable(Object value, int column) {
            return true;
        }

        public Object getValueAt(Object o, int column) {
            if (column == 0) {
                return o.toString();
            }
            // fixed: use the cached constant instead of the deprecated
            // Boolean(boolean) constructor
            return Boolean.TRUE;
        }
    }

    public void interactiveTestCompareTreeProperties() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setShowsRootHandles(false);
        treeTable.setRootVisible(false);
        JXTreeTable other = new JXTreeTable(treeTableModel);
        other.setRootVisible(true);
        other.setShowsRootHandles(false);
        JFrame frame = wrapWithScrollingInFrame(treeTable, other, "compare rootVisible");
        frame.setVisible(true);
    }

    /**
     * setting tree properties: tree not updated correctly.
     */
    public void interactiveTestTreeProperties() {
        final JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        Action toggleHandles = new AbstractAction("Toggle Handles") {
            public void actionPerformed(ActionEvent e) {
                treeTable.setShowsRootHandles(!treeTable.getShowsRootHandles());
            }
        };
        Action toggleRoot = new AbstractAction("Toggle Root") {
            public void actionPerformed(ActionEvent e) {
                treeTable.setRootVisible(!treeTable.isRootVisible());
            }
        };
        treeTable.setRowHeight(22);
        treeTable.setRowMargin(1);
        JFrame frame = wrapWithScrollingInFrame(treeTable,
                "Toggle Tree properties ");
        addAction(frame, toggleRoot);
        addAction(frame, toggleHandles);
        frame.setVisible(true);
    }

    /** issue #148
     * did not work on LFs which normally respect lineStyle
     * winLF does not respect it anyway...
     */
    public void interactiveTestFilterAndLineStyle() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        // issue #148
        // did not work on LFs which normally respect lineStyle
        // winLF does not respect it anyway...
        treeTable.putClientProperty("JTree.lineStyle", "Angled");
        treeTable.setRowHeight(22);
        treeTable.setRowMargin(1);
        treeTable.setHighlighters(new HighlighterPipeline(new Highlighter[] {
                AlternateRowHighlighter.quickSilver,
                new HierarchicalColumnHighlighter(),
                new PatternHighlighter(null, Color.red, "s.*",
                        Pattern.CASE_INSENSITIVE, 0, -1), }));
        JFrame frame = wrapWithScrollingInFrame(treeTable,
                "QuickSilver-, Column-, PatternHighligher and LineStyle");
        frame.setVisible(true);
    }

    /**
     * Issue #204: weird filtering.
     *
     */
    public void interactiveTestFilters() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.putClientProperty("JTree.lineStyle", "Angled");
        treeTable.setRowHeight(22);
        treeTable.setRowMargin(1);
        treeTable.setFilters(new FilterPipeline(new Filter[] {
                new PatternFilter( "d.*",
                        Pattern.CASE_INSENSITIVE, 0), }));
        JFrame frame = wrapWithScrollingInFrame(treeTable,
                "PatternFilter");
        frame.setVisible(true);
    }

    /**
     * Issue #??: weird sorting.
     *
     */
    public void interactiveTestSortingFilters() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setRowHeight(22);
        treeTable.setRowMargin(1);
        treeTable.setFilters(new FilterPipeline(new Filter[] {
                new ShuttleSorter(1, false), }));
        JFrame frame = wrapWithScrollingInFrame(treeTable,
                "SortingFilter");
        frame.setVisible(true);
    }

    public void interactiveTestFiltersAndRowHeight() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setRowHeight(22);
        treeTable.setRowMargin(1);
        treeTable.setHighlighters(new HighlighterPipeline(new Highlighter[] {
                AlternateRowHighlighter.linePrinter,
                new HierarchicalColumnHighlighter(), }));
        JFrame frame = wrapWithScrollingInFrame(treeTable,
                "LinePrinter-, ColumnHighlighter and RowHeight");
        frame.setVisible(true);
    }

    public void interactiveTestAlternateRowHighlighter() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable
                .setHighlighters(new HighlighterPipeline(
                        new Highlighter[] { AlternateRowHighlighter.classicLinePrinter, }));
        treeTable.setRowHeight(22);
        treeTable.setRowMargin(1);
        JFrame frame = wrapWithScrollingInFrame(treeTable,
                "ClassicLinePrinter and RowHeight");
        frame.setVisible(true);
    }

    public void interactiveTestBackgroundHighlighter() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setHighlighters(new HighlighterPipeline(new Highlighter[] {
                AlternateRowHighlighter.notePadBackground,
                new HierarchicalColumnHighlighter(), }));
        treeTable.setBackground(new Color(0xFF, 0xFF, 0xCC)); // notepad
        treeTable.setGridColor(Color.cyan.darker());
        treeTable.setRowHeight(22);
        treeTable.setRowMargin(1);
        treeTable.setShowHorizontalLines(true);
        JFrame frame = wrapWithScrollingInFrame(treeTable,
                "NotePadBackground- HierarchicalColumnHighlighter and horiz lines");
        frame.setVisible(true);
    }

    public void interactiveTestLedgerBackground() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setBackground(new Color(0xF5, 0xFF, 0xF5)); // ledger
        treeTable.setGridColor(Color.cyan.darker());
        treeTable.setRowHeight(22);
        treeTable.setRowMargin(1);
        treeTable.setShowHorizontalLines(true);
        JFrame frame = wrapWithScrollingInFrame(treeTable, "LedgerBackground");
        frame.setVisible(true);
    }

    public void interactiveTestHierarchicalColumn() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setHighlighters(new HighlighterPipeline(
                new Highlighter[] { new HierarchicalColumnHighlighter(), }));
        JFrame frame = wrapWithScrollingInFrame(treeTable,
                "HierarchicalColumnHigh");
        frame.setVisible(true);
    }

    public void interactiveTestIntercellSpacing1() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setIntercellSpacing(new Dimension(1, 1));
        treeTable.setShowGrid(true);
        JFrame frame = wrapWithScrollingInFrame(treeTable, "Intercellspacing 1");
        frame.setVisible(true);
    }

    public void interactiveTestIntercellSpacing2() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setIntercellSpacing(new Dimension(2, 2));
        treeTable.setShowGrid(true);
        JFrame frame = wrapWithScrollingInFrame(treeTable, "Intercellspacing 2");
        frame.setVisible(true);
    }

    public void interactiveTestIntercellSpacing3() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setIntercellSpacing(new Dimension(3, 3));
        treeTable.setShowGrid(true);
        JFrame frame = wrapWithScrollingInFrame(treeTable, "Intercellspacing 3");
        frame.setVisible(true);
    }

    public void interactiveTestHighlighterRowHeight() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setHighlighters(new HighlighterPipeline(
                new Highlighter[] { new Highlighter(Color.orange, null), }));
        treeTable.setIntercellSpacing(new Dimension(15, 15));
        treeTable.setRowHeight(48);
        JFrame frame = wrapWithScrollingInFrame(treeTable,
                "Orange, IntercellSpacing15, big rowheight");
        frame.setVisible(true);
    }

    public void interactiveTestHighLighters() {
        JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        treeTable.setIntercellSpacing(new Dimension(15, 15));
        treeTable.setRowHeight(48);
        // not supported in JXTreeTable
        // treeTable.setRowHeight(0, 96);
        treeTable.setShowGrid(true);
        // highlight the focused cell only
        Highlighter conditional = new ConditionalHighlighter(Color.BLUE, null, 0, 0) {
            protected boolean test(ComponentAdapter adapter) {
                return adapter.hasFocus();
            }
        };
        treeTable.setHighlighters(new HighlighterPipeline(
                new Highlighter[] {
                        conditional,
                        new Highlighter(Color.orange, null),
                        new HierarchicalColumnHighlighter(),
                        new PatternHighlighter(null, Color.red,
                                ".*D.*", 0, 0, 0), }));
        JFrame frame = wrapWithScrollingInFrame(treeTable, "Highlighters");
        frame.setVisible(true);
    }

    protected void setUp() throws Exception {
        super.setUp();
        treeTableModel = new FileSystemModel();
    }

    public static void main(String[] args) {
        // LFSwitcher.metalLF();
        JXTreeTableUnitTest test = new JXTreeTableUnitTest();
        try {
            // test.runInteractiveTests();
            // test.runInteractiveTests("interactive.*HighLighters");
            // test.runInteractiveTests("interactive.*SortingFilter.*");
            // test.runInteractiveTests("interactive.*Tool.*");
            test.runInteractiveTests("interactive.*Focus.*");
        } catch (Exception ex) {
            // fixed: was silently swallowed, hiding test-runner failures
            ex.printStackTrace();
        }
    }
}
|
package tk.wurst_client.features.mods;
import java.util.Random;
import net.minecraft.block.material.Material;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.BlockPos;
import tk.wurst_client.events.listeners.UpdateListener;
import tk.wurst_client.features.Feature;
import tk.wurst_client.features.special_features.YesCheatSpf.BypassLevel;
import tk.wurst_client.utils.BlockUtils;
// Wurst client mod: on every game tick, picks random positions around the
// player and tries to place a block at one of them.
@Mod.Info(description = "Places random blocks around you.",
name = "BuildRandom",
tags = "build random",
help = "Mods/BuildRandom")
@Mod.Bypasses
public class BuildRandomMod extends Mod implements UpdateListener
{
// source of the random placement offsets
private final Random random = new Random();
@Override
public Feature[] getSeeAlso()
{
return new Feature[]{wurst.mods.autoBuildMod, wurst.mods.fastPlaceMod,
wurst.mods.autoSwitchMod};
}
// TODO: Visual indicator of current position similar to the one in Nuker
@Override
public void onEnable()
{
// start receiving onUpdate() callbacks
wurst.events.add(UpdateListener.class, this);
}
@Override
public void onDisable()
{
wurst.events.remove(UpdateListener.class, this);
}
// Called once per tick; tries random positions until a block is placed
// or the attempt budget is exhausted.
@Override
public void onUpdate()
{
// don't place blocks while the camera is detached from the player
if(wurst.mods.freecamMod.isActive()
|| wurst.mods.remoteViewMod.isActive())
return;
// check timer
if(mc.rightClickDelayTimer > 0 && !wurst.mods.fastPlaceMod.isActive())
return;
// set mode & range (smaller range when an anticheat bypass is required)
boolean legitMode = wurst.special.yesCheatSpf.getBypassLevel()
.ordinal() > BypassLevel.ANTICHEAT.ordinal();
int range = legitMode ? 5 : 6;
int bound = range * 2 + 1;
BlockPos pos;
int attempts = 0;
do
{
// generate random position within +/- range of the player on each axis
pos = new BlockPos(mc.player).add(random.nextInt(bound) - range,
random.nextInt(bound) - range, random.nextInt(bound) - range);
attempts++;
// NOTE: due to short-circuit evaluation, the 128th generated position is
// never tried - at most 127 placement attempts happen per call
}while(attempts < 128 && !tryToPlaceBlock(legitMode, pos));
}
// Attempts to place a block at pos; returns true on success.
private boolean tryToPlaceBlock(boolean legitMode, BlockPos pos)
{
// only place into empty space
if(BlockUtils.getMaterial(pos) != Material.AIR)
return false;
if(legitMode)
{
if(!BlockUtils.placeBlockLegit(pos))
return false;
// re-arm the vanilla right-click cooldown after a successful placement
mc.rightClickDelayTimer = 4;
}else
{
if(!BlockUtils.placeBlockSimple(pos))
return false;
// swing the arm so the placement is visible to other players
mc.player.swingArm(EnumHand.MAIN_HAND);
mc.rightClickDelayTimer = 4;
}
return true;
}
}
|
package org.eclipse.emf.emfstore.client.model.impl;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
import org.eclipse.emf.emfstore.client.model.Configuration;
import org.eclipse.emf.emfstore.client.model.ModelPackage;
import org.eclipse.emf.emfstore.client.model.ServerInfo;
import org.eclipse.emf.emfstore.client.model.Usersession;
import org.eclipse.emf.emfstore.client.model.WorkspaceManager;
import org.eclipse.emf.emfstore.client.model.connectionmanager.ConnectionManager;
import org.eclipse.emf.emfstore.client.model.connectionmanager.KeyStoreManager;
import org.eclipse.emf.emfstore.client.model.observers.LoginObserver;
import org.eclipse.emf.emfstore.client.model.observers.LogoutObserver;
import org.eclipse.emf.emfstore.common.model.util.ModelUtil;
import org.eclipse.emf.emfstore.server.exceptions.AccessControlException;
import org.eclipse.emf.emfstore.server.exceptions.ConnectionException;
import org.eclipse.emf.emfstore.server.exceptions.EmfStoreException;
import org.eclipse.emf.emfstore.server.model.SessionId;
import org.eclipse.emf.emfstore.server.model.accesscontrol.ACUser;
import org.eclipse.emf.emfstore.server.model.accesscontrol.OrgUnitProperty;
/**
* <!-- begin-user-doc --> An implementation of the model object ' <em><b>Usersession</b></em>'. <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.eclipse.emf.emfstore.client.model.impl.UsersessionImpl#getUsername <em>Username</em>}</li>
* <li>{@link org.eclipse.emf.emfstore.client.model.impl.UsersessionImpl#getPassword <em>Password</em>}</li>
* <li>{@link org.eclipse.emf.emfstore.client.model.impl.UsersessionImpl#getSessionId <em>Session Id</em>}</li>
* <li>{@link org.eclipse.emf.emfstore.client.model.impl.UsersessionImpl#getPersistentPassword <em>Persistent Password
* </em>}</li>
* <li>{@link org.eclipse.emf.emfstore.client.model.impl.UsersessionImpl#getServerInfo <em>Server Info</em>}</li>
* <li>{@link org.eclipse.emf.emfstore.client.model.impl.UsersessionImpl#isSavePassword <em>Save Password</em>}</li>
* <li>{@link org.eclipse.emf.emfstore.client.model.impl.UsersessionImpl#getACUser <em>AC User</em>}</li>
* <li>{@link org.eclipse.emf.emfstore.client.model.impl.UsersessionImpl#getChangedProperties <em>Changed Properties
* </em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class UsersessionImpl extends EObjectImpl implements Usersession {
/**
* The default value of the '{@link #getUsername() <em>Username</em>}' attribute.
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @see #getUsername()
* @generated
* @ordered
*/
protected static final String USERNAME_EDEFAULT = null;
/**
* The cached value of the '{@link #getUsername() <em>Username</em>}' attribute.
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @see #getUsername()
* @generated
* @ordered
*/
protected String username = USERNAME_EDEFAULT;
/**
* The default value of the '{@link #getPassword() <em>Password</em>}' attribute.
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @see #getPassword()
* @generated
* @ordered
*/
protected static final String PASSWORD_EDEFAULT = null;
/**
* The cached value of the '{@link #getPassword() <em>Password</em>}' attribute.
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @see #getPassword()
* @generated
* @ordered
*/
protected String password = PASSWORD_EDEFAULT;
/**
* The cached value of the '{@link #getSessionId() <em>Session Id</em>}' reference.
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @see #getSessionId()
* @generated
* @ordered
*/
protected SessionId sessionId;
/**
* The default value of the '{@link #getPersistentPassword() <em>Persistent Password</em>}' attribute.
* <!-- begin-user-doc --> <!--
* end-user-doc -->
*
* @see #getPersistentPassword()
* @generated
* @ordered
*/
protected static final String PERSISTENT_PASSWORD_EDEFAULT = null;
/**
* The cached value of the '{@link #getPersistentPassword() <em>Persistent Password</em>}' attribute.
* <!-- begin-user-doc --> <!--
* end-user-doc -->
*
* @see #getPersistentPassword()
* @generated
* @ordered
*/
protected String persistentPassword = PERSISTENT_PASSWORD_EDEFAULT;
/**
* The cached value of the '{@link #getServerInfo() <em>Server Info</em>}' reference.
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @see #getServerInfo()
* @generated
* @ordered
*/
protected ServerInfo serverInfo;
/**
* The default value of the '{@link #isSavePassword() <em>Save Password</em>}' attribute.
* <!-- begin-user-doc --> <!--
* end-user-doc -->
*
* @see #isSavePassword()
* @generated
* @ordered
*/
protected static final boolean SAVE_PASSWORD_EDEFAULT = false;
/**
* The cached value of the '{@link #isSavePassword() <em>Save Password</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
*
* @see #isSavePassword()
* @generated
* @ordered
*/
protected boolean savePassword = SAVE_PASSWORD_EDEFAULT;
/**
* The cached value of the '{@link #getACUser() <em>AC User</em>}' containment reference.
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @see #getACUser()
* @generated
* @ordered
*/
protected ACUser acUser;
/**
* The cached value of the '{@link #getChangedProperties()
* <em>Changed Properties</em>}' containment reference list. <!--
* begin-user-doc --> <!-- end-user-doc -->
*
* @see #getChangedProperties()
* @generated
* @ordered
*/
protected EList<OrgUnitProperty> changedProperties;
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
protected UsersessionImpl() {
super();
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
protected EClass eStaticClass() {
return ModelPackage.Literals.USERSESSION;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public String getUsername() {
return username;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setUsername(String newUsername) {
String oldUsername = username;
username = newUsername;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.USERSESSION__USERNAME, oldUsername,
username));
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public String getPasswordGen() {
return password;
}
// begin of custom code
/**
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.emfstore.client.model.Usersession#getPassword()
 * @generated NOT
 */
public String getPassword() {
	// When the password is persisted, the persistent copy is authoritative.
	return isSavePassword() ? getPersistentPassword() : password;
}
// end of custom code
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setPasswordGen(String newPassword) {
String oldPassword = password;
password = newPassword;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.USERSESSION__PASSWORD, oldPassword,
password));
}
// begin of custom code
/**
 * {@inheritDoc}
 *
 * <p>The password is always stored encrypted via the {@link KeyStoreManager}.
 * When {@link #isSavePassword()} is enabled, the persistent copy is kept in
 * sync as well.
 *
 * @see org.eclipse.emf.emfstore.client.model.Usersession#setPassword(java.lang.String)
 * @generated NOT
 */
public void setPassword(String newPassword) {
	if (newPassword == null) {
		// FIX: the old code fell through to newPassword.equals(...) below and
		// threw a NullPointerException whenever savePassword was enabled.
		setPasswordGen(null);
		return;
	}
	// Encrypt once and reuse the result for both the transient and the
	// persistent copy (previously the same value was encrypted twice).
	String encryptedPassword = KeyStoreManager.getInstance().encrypt(newPassword, getServerInfo());
	setPasswordGen(encryptedPassword);
	if (isSavePassword() && !newPassword.equals(persistentPassword)) {
		setPersistentPassword(encryptedPassword);
	}
}
// end of custom code
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public SessionId getSessionIdGen() {
if (sessionId != null && sessionId.eIsProxy()) {
InternalEObject oldSessionId = (InternalEObject) sessionId;
sessionId = (SessionId) eResolveProxy(oldSessionId);
if (sessionId != oldSessionId) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, ModelPackage.USERSESSION__SESSION_ID,
oldSessionId, sessionId));
}
}
return sessionId;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public SessionId basicGetSessionId() {
return sessionId;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setSessionId(SessionId newSessionId) {
SessionId oldSessionId = sessionId;
sessionId = newSessionId;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.USERSESSION__SESSION_ID, oldSessionId,
sessionId));
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public String getPersistentPassword() {
return persistentPassword;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setPersistentPasswordGen(String newPersistentPassword) {
String oldPersistentPassword = persistentPassword;
persistentPassword = newPersistentPassword;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.USERSESSION__PERSISTENT_PASSWORD,
oldPersistentPassword, persistentPassword));
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.emfstore.client.model.Usersession#setPersistentPassword(java.lang.String)
* @generated NOT
*/
public void setPersistentPassword(String newPersistentPassword) {
setPersistentPasswordGen(newPersistentPassword);
}
// end of custom code
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public ServerInfo getServerInfo() {
if (serverInfo != null && serverInfo.eIsProxy()) {
InternalEObject oldServerInfo = (InternalEObject) serverInfo;
serverInfo = (ServerInfo) eResolveProxy(oldServerInfo);
if (serverInfo != oldServerInfo) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, ModelPackage.USERSESSION__SERVER_INFO,
oldServerInfo, serverInfo));
}
}
return serverInfo;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public ServerInfo basicGetServerInfo() {
return serverInfo;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setServerInfo(ServerInfo newServerInfo) {
ServerInfo oldServerInfo = serverInfo;
serverInfo = newServerInfo;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.USERSESSION__SERVER_INFO, oldServerInfo,
serverInfo));
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public boolean isSavePassword() {
return savePassword;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setSavePasswordGen(boolean newSavePassword) {
boolean oldSavePassword = savePassword;
savePassword = newSavePassword;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.USERSESSION__SAVE_PASSWORD,
oldSavePassword, savePassword));
}
// begin of custom code
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.emfstore.client.model.Usersession#setSavePassword(boolean)
* @generated NOT
*/
public void setSavePassword(boolean newSavePassword) {
if (!newSavePassword) {
setPersistentPassword(null);
} else if (getPasswordGen() != null) {
setPersistentPassword(getPasswordGen());
}
setSavePasswordGen(newSavePassword);
}
// end of custom code
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public ACUser getACUser() {
if (acUser != null && acUser.eIsProxy()) {
InternalEObject oldACUser = (InternalEObject) acUser;
acUser = (ACUser) eResolveProxy(oldACUser);
if (acUser != oldACUser) {
InternalEObject newACUser = (InternalEObject) acUser;
NotificationChain msgs = oldACUser.eInverseRemove(this, EOPPOSITE_FEATURE_BASE
- ModelPackage.USERSESSION__AC_USER, null, null);
if (newACUser.eInternalContainer() == null) {
msgs = newACUser.eInverseAdd(this, EOPPOSITE_FEATURE_BASE - ModelPackage.USERSESSION__AC_USER,
null, msgs);
}
if (msgs != null)
msgs.dispatch();
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, ModelPackage.USERSESSION__AC_USER,
oldACUser, acUser));
}
}
return acUser;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public ACUser basicGetACUser() {
return acUser;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public NotificationChain basicSetACUser(ACUser newACUser, NotificationChain msgs) {
ACUser oldACUser = acUser;
acUser = newACUser;
if (eNotificationRequired()) {
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
ModelPackage.USERSESSION__AC_USER, oldACUser, newACUser);
if (msgs == null)
msgs = notification;
else
msgs.add(notification);
}
return msgs;
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public void setACUser(ACUser newACUser) {
if (newACUser != acUser) {
NotificationChain msgs = null;
if (acUser != null)
msgs = ((InternalEObject) acUser).eInverseRemove(this, EOPPOSITE_FEATURE_BASE
- ModelPackage.USERSESSION__AC_USER, null, msgs);
if (newACUser != null)
msgs = ((InternalEObject) newACUser).eInverseAdd(this, EOPPOSITE_FEATURE_BASE
- ModelPackage.USERSESSION__AC_USER, null, msgs);
msgs = basicSetACUser(newACUser, msgs);
if (msgs != null)
msgs.dispatch();
} else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.USERSESSION__AC_USER, newACUser,
newACUser));
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
public EList<OrgUnitProperty> getChangedProperties() {
if (changedProperties == null) {
changedProperties = new EObjectContainmentEList.Resolving<OrgUnitProperty>(OrgUnitProperty.class, this,
ModelPackage.USERSESSION__CHANGED_PROPERTIES);
}
return changedProperties;
}
// begin of custom code
/**
 * Indicates whether this session currently holds a server-issued session id.
 *
 * @return true if the session is logged in
 * @generated NOT
 */
public boolean isLoggedIn() {
	return sessionId != null;
}
/**
 * {@inheritDoc}
 *
 * Logs in against the server described by this session's {@link ServerInfo}
 * and stores the returned session id on success.
 *
 * @see org.eclipse.emf.emfstore.client.model.Usersession#logIn()
 * @generated NOT
 */
public void logIn() throws EmfStoreException, AccessControlException {
	ConnectionManager connectionManager = WorkspaceManager.getInstance().getConnectionManager();
	// sanity checks: credentials and a server url must be present
	if (getUsername() == null || getPassword() == null) {
		throw new AccessControlException("Username or Password not set!");
	}
	ServerInfo serverInfo = getServerInfo();
	if (serverInfo == null) {
		throw new IllegalStateException("No ServerInfo set!");
	}
	if (serverInfo.getUrl() == null) {
		throw new ConnectionException("Invalid server url: null");
	}
	// prepare serverInfo for send: copy and remove usersession
	// (the clone avoids transmitting this session and its back-reference)
	ServerInfo copy = ModelUtil.clone(serverInfo);
	copy.setLastUsersession(null);
	SessionId newSessionId = null;
	// getPassword() returns the persistent password when savePassword is set
	newSessionId = connectionManager.logIn(username, getPassword(), copy, Configuration.getClientVersion());
	// remember this session on the server info and keep the new session id
	getServerInfo().setLastUsersession(this);
	this.setSessionId(newSessionId);
	// notify observers only after the login fully succeeded
	WorkspaceManager.getObserverBus().notify(LoginObserver.class).loginCompleted(this);
}
/**
 * {@inheritDoc}
 *
 * Logs this session out on the server, clears the local session id and
 * notifies logout observers.
 */
public void logout() throws EmfStoreException {
	ConnectionManager connectionManager = WorkspaceManager.getInstance().getConnectionManager();
	// NOTE(review): sessionId may be null when this session never logged in;
	// presumably the connection manager tolerates or reports that - verify.
	connectionManager.logout(sessionId);
	setSessionId(null);
	WorkspaceManager.getObserverBus().notify(LogoutObserver.class).logoutCompleted(this);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case ModelPackage.USERSESSION__AC_USER:
return basicSetACUser(null, msgs);
case ModelPackage.USERSESSION__CHANGED_PROPERTIES:
return ((InternalEList<?>) getChangedProperties()).basicRemove(otherEnd, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
// end of custom code
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case ModelPackage.USERSESSION__USERNAME:
return getUsername();
case ModelPackage.USERSESSION__PASSWORD:
return getPassword();
case ModelPackage.USERSESSION__SESSION_ID:
if (resolve)
return getSessionId();
return basicGetSessionId();
case ModelPackage.USERSESSION__PERSISTENT_PASSWORD:
return getPersistentPassword();
case ModelPackage.USERSESSION__SERVER_INFO:
if (resolve)
return getServerInfo();
return basicGetServerInfo();
case ModelPackage.USERSESSION__SAVE_PASSWORD:
return isSavePassword();
case ModelPackage.USERSESSION__AC_USER:
if (resolve)
return getACUser();
return basicGetACUser();
case ModelPackage.USERSESSION__CHANGED_PROPERTIES:
return getChangedProperties();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case ModelPackage.USERSESSION__USERNAME:
setUsername((String) newValue);
return;
case ModelPackage.USERSESSION__PASSWORD:
setPassword((String) newValue);
return;
case ModelPackage.USERSESSION__SESSION_ID:
setSessionId((SessionId) newValue);
return;
case ModelPackage.USERSESSION__PERSISTENT_PASSWORD:
setPersistentPassword((String) newValue);
return;
case ModelPackage.USERSESSION__SERVER_INFO:
setServerInfo((ServerInfo) newValue);
return;
case ModelPackage.USERSESSION__SAVE_PASSWORD:
setSavePassword((Boolean) newValue);
return;
case ModelPackage.USERSESSION__AC_USER:
setACUser((ACUser) newValue);
return;
case ModelPackage.USERSESSION__CHANGED_PROPERTIES:
getChangedProperties().clear();
getChangedProperties().addAll((Collection<? extends OrgUnitProperty>) newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case ModelPackage.USERSESSION__USERNAME:
setUsername(USERNAME_EDEFAULT);
return;
case ModelPackage.USERSESSION__PASSWORD:
setPassword(PASSWORD_EDEFAULT);
return;
case ModelPackage.USERSESSION__SESSION_ID:
setSessionId((SessionId) null);
return;
case ModelPackage.USERSESSION__PERSISTENT_PASSWORD:
setPersistentPassword(PERSISTENT_PASSWORD_EDEFAULT);
return;
case ModelPackage.USERSESSION__SERVER_INFO:
setServerInfo((ServerInfo) null);
return;
case ModelPackage.USERSESSION__SAVE_PASSWORD:
setSavePassword(SAVE_PASSWORD_EDEFAULT);
return;
case ModelPackage.USERSESSION__AC_USER:
setACUser((ACUser) null);
return;
case ModelPackage.USERSESSION__CHANGED_PROPERTIES:
getChangedProperties().clear();
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc --> <!-- end-user-doc -->
*
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case ModelPackage.USERSESSION__USERNAME:
return USERNAME_EDEFAULT == null ? username != null : !USERNAME_EDEFAULT.equals(username);
case ModelPackage.USERSESSION__PASSWORD:
return PASSWORD_EDEFAULT == null ? password != null : !PASSWORD_EDEFAULT.equals(password);
case ModelPackage.USERSESSION__SESSION_ID:
return sessionId != null;
case ModelPackage.USERSESSION__PERSISTENT_PASSWORD:
return PERSISTENT_PASSWORD_EDEFAULT == null ? persistentPassword != null : !PERSISTENT_PASSWORD_EDEFAULT
.equals(persistentPassword);
case ModelPackage.USERSESSION__SERVER_INFO:
return serverInfo != null;
case ModelPackage.USERSESSION__SAVE_PASSWORD:
return savePassword != SAVE_PASSWORD_EDEFAULT;
case ModelPackage.USERSESSION__AC_USER:
return acUser != null;
case ModelPackage.USERSESSION__CHANGED_PROPERTIES:
return changedProperties != null && !changedProperties.isEmpty();
}
return super.eIsSet(featureID);
}
/**
 * {@inheritDoc}
 *
 * Renders the default EMF representation followed by the username.
 *
 * @generated NOT
 */
@Override
public String toString() {
	if (eIsProxy()) {
		return super.toString();
	}
	return super.toString() + " (username: " + username + ')';
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.emfstore.client.model.Usersession#getSessionId()
*/
public SessionId getSessionId() {
return getSessionIdGen();
}
} // UsersessionImpl
|
package org.zstack.storage.fusionstor.backup;
import org.zstack.header.storage.backup.BackupStorageEO;
import org.zstack.header.storage.backup.BackupStorageVO;
import org.zstack.header.tag.AutoDeleteTag;
import org.zstack.header.vo.EO;
import org.zstack.header.vo.NoView;
import javax.persistence.*;
import java.util.HashSet;
import java.util.Set;
@Entity
@Table
@PrimaryKeyJoinColumn(name = "uuid", referencedColumnName = "uuid")
@EO(EOClazz = BackupStorageEO.class, needView = false)
@AutoDeleteTag
public class FusionstorBackupStorageVO extends BackupStorageVO {
    // Monitor nodes of this backup storage; eagerly loaded, read-only mapping.
    @OneToMany(fetch = FetchType.EAGER)
    @JoinColumn(name = "backupStorageUuid", insertable = false, updatable = false)
    @NoView
    private Set<FusionstorBackupStorageMonVO> mons = new HashSet<FusionstorBackupStorageMonVO>();

    // Name of the Fusionstor pool backing this storage.
    @Column
    private String poolName;

    // Fusionstor cluster fsid.
    @Column
    private String fsid;

    public FusionstorBackupStorageVO() {
    }

    public FusionstorBackupStorageVO(BackupStorageVO vo) {
        super(vo);
    }

    /**
     * Copy constructor.
     * FIX: poolName and fsid were previously not carried over, producing an
     * incomplete copy; they are now copied along with mons.
     */
    public FusionstorBackupStorageVO(FusionstorBackupStorageVO other) {
        super(other);
        this.mons = other.mons;
        this.poolName = other.poolName;
        this.fsid = other.fsid;
    }

    public String getPoolName() {
        return poolName;
    }

    public void setPoolName(String poolName) {
        this.poolName = poolName;
    }

    public String getFsid() {
        return fsid;
    }

    public void setFsid(String fsid) {
        this.fsid = fsid;
    }

    public Set<FusionstorBackupStorageMonVO> getMons() {
        return mons;
    }

    public void setMons(Set<FusionstorBackupStorageMonVO> mons) {
        this.mons = mons;
    }
}
|
package morfologik.speller;
import static morfologik.fsa.MatchResult.EXACT_MATCH;
import static morfologik.fsa.MatchResult.SEQUENCE_IS_A_PREFIX;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.text.Normalizer;
import java.text.Normalizer.Form;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import morfologik.fsa.FSA;
import morfologik.fsa.FSAFinalStatesIterator;
import morfologik.fsa.FSATraversal;
import morfologik.fsa.MatchResult;
import morfologik.stemming.Dictionary;
import morfologik.stemming.DictionaryMetadata;
import morfologik.util.BufferUtils;
public class Speller {
public static int MAX_WORD_LENGTH = 120;
final static int FREQ_RANGES = 'Z' - 'A' + 1;
final static int FIRST_RANGE_CODE = 'A'; // less frequent words
//FIXME: this is an upper limit for replacement searches, we need
//proper tree traversal instead of generation of all possible candidates
final static int UPPER_SEARCH_LIMIT = 15;
private final int editDistance;
private int e_d; // effective edit distance
private final HMatrix hMatrix;
private char[] candidate; /* current replacement */
private int candLen;
private int wordLen; /* length of word being processed */
private char[] word_ff; /* word being processed */
/**
* List of candidate strings, including same additional data such as
* edit distance from the original word.
*/
private final List<CandidateData> candidates = new ArrayList<CandidateData>();
private boolean containsSeparators = true;
/**
* Internal reusable buffer for encoding words into byte arrays using
* {@link #encoder}.
*/
private ByteBuffer byteBuffer = ByteBuffer.allocate(MAX_WORD_LENGTH);
/**
* Internal reusable buffer for encoding words into byte arrays using
* {@link #encoder}.
*/
private CharBuffer charBuffer = CharBuffer.allocate(MAX_WORD_LENGTH);
/**
* Reusable match result.
*/
private final MatchResult matchResult = new MatchResult();
/**
* Features of the compiled dictionary.
*
* @see DictionaryMetadata
*/
private final DictionaryMetadata dictionaryMetadata;
/**
* Charset encoder for the FSA.
*/
private final CharsetEncoder encoder;
/**
* Charset decoder for the FSA.
*/
protected final CharsetDecoder decoder;
/** An FSA used for lookups. */
private final FSATraversal matcher;
/** FSA's root node. */
private final int rootNode;
/**
* The FSA we are using.
*/
protected final FSA fsa;
/** An iterator for walking along the final states of {@link #fsa}. */
private final FSAFinalStatesIterator finalStatesIterator;
/**
 * Creates a speller with the default edit distance of 1.
 *
 * @param dictionary the dictionary used for lookups and suggestions
 */
public Speller(final Dictionary dictionary) {
	this(dictionary, 1);
}

/**
 * Creates a speller with the given maximum edit distance.
 *
 * @param dictionary the dictionary used for lookups and suggestions
 * @param editDistance maximum edit distance for replacement candidates
 */
public Speller(final Dictionary dictionary, final int editDistance) {
	this(dictionary, editDistance, true);
}

/**
 * Creates a speller.
 *
 * @param dictionary the dictionary used for lookups and suggestions
 * @param editDistance maximum edit distance for replacement candidates
 * @param convertCase NOTE(review): accepted but never read in this body -
 *            case conversion is driven by the dictionary metadata instead;
 *            confirm the parameter is intentionally unused.
 * @throws IllegalArgumentException when the FSA has no root node or the
 *             dictionary carries no metadata
 */
public Speller(final Dictionary dictionary, final int editDistance, final boolean convertCase) {
	this.editDistance = editDistance;
	hMatrix = new HMatrix(editDistance, MAX_WORD_LENGTH);
	this.dictionaryMetadata = dictionary.metadata;
	this.rootNode = dictionary.fsa.getRootNode();
	this.fsa = dictionary.fsa;
	this.matcher = new FSATraversal(fsa);
	this.finalStatesIterator = new FSAFinalStatesIterator(fsa, rootNode);
	if (rootNode == 0) {
		throw new IllegalArgumentException(
			"Dictionary must have at least the root node.");
	}
	if (dictionaryMetadata == null) {
		throw new IllegalArgumentException(
			"Dictionary metadata must not be null.");
	}
	encoder = dictionaryMetadata.getEncoder();
	decoder = dictionaryMetadata.getDecoder();
	// Multibyte separator will result in an exception here.
	dictionaryMetadata.getSeparatorAsChar();
}
/**
 * Encodes a character sequence into {@code bytes} using {@link #encoder},
 * allocating a larger buffer when the current one is too small.
 *
 * @param chars the characters to encode; position/mark are restored on return
 * @param bytes a reusable target buffer (may be replaced by a larger one)
 * @return the (possibly reallocated) buffer, flipped and ready for reading;
 *         empty when encoding failed
 */
private ByteBuffer charsToBytes(final CharBuffer chars, ByteBuffer bytes) {
	bytes.clear();
	final int needed = (int) (chars.remaining() * encoder.maxBytesPerChar());
	if (bytes.capacity() <= needed) {
		bytes = ByteBuffer.allocate(needed);
	}
	chars.mark();
	encoder.reset();
	final CoderResult result = encoder.encode(chars, bytes, true);
	if (result.isError()) {
		// On encoding errors return an empty buffer rather than partial data.
		bytes.clear();
	}
	bytes.flip();
	chars.reset();
	return bytes;
}
// Encodes a word into the FSA's byte encoding, reusing the shared buffers.
private ByteBuffer charSequenceToBytes(final CharSequence word) {
	final int len = word.length();
	charBuffer.clear();
	charBuffer = BufferUtils.ensureCapacity(charBuffer, len);
	for (int pos = 0; pos < len; pos++) {
		charBuffer.put(word.charAt(pos));
	}
	charBuffer.flip();
	byteBuffer = charsToBytes(charBuffer, byteBuffer);
	return byteBuffer;
}
/**
 * Decides whether a word is misspelled with respect to the dictionary and
 * its metadata options (punctuation/number/camel-case/uppercase handling,
 * input conversions, case conversion).
 *
 * @param word the word to check
 * @return true when the word should be reported as misspelled
 */
public boolean isMisspelled(final String word) {
	// dictionaries usually do not contain punctuation
	String wordToCheck = word;
	if (!dictionaryMetadata.getInputConversionPairs().isEmpty()) {
		// apply configured input conversions before any lookup
		wordToCheck = Dictionary.convertText(word,
			dictionaryMetadata.getInputConversionPairs()).toString();
	}
	// a single non-alphabetic character counts as punctuation below
	boolean isAlphabetic = wordToCheck.length() != 1 || isAlphabetic(wordToCheck.charAt(0));
	// Misspelled = non-empty, not excluded by any ignore-* option, not in
	// the dictionary as-is, and (when case conversion is on) its lower-case
	// form is not in the dictionary either (mixed-case words are kept as-is).
	return wordToCheck.length() > 0
		&& (!dictionaryMetadata.isIgnoringPunctuation() || isAlphabetic)
		&& (!dictionaryMetadata.isIgnoringNumbers() || !containsDigit(wordToCheck))
		&& !(dictionaryMetadata.isIgnoringCamelCase() && isCamelCase(wordToCheck))
		&& !(dictionaryMetadata.isIgnoringAllUppercase() && isAlphabetic && isAllUppercase(wordToCheck))
		&& !isInDictionary(wordToCheck)
		&& (!dictionaryMetadata.isConvertingCase() ||
			!(!isMixedCase(wordToCheck) && isInDictionary(wordToCheck.toLowerCase(dictionaryMetadata.getLocale()))));
}
/**
 * Test whether the word is found in the dictionary.
 * @param word the word to be tested
 * @return True if it is found.
 */
public boolean isInDictionary(final CharSequence word) {
	byteBuffer = charSequenceToBytes(word);
	// Try to find a partial match in the dictionary.
	final MatchResult match = matcher.match(matchResult,
		byteBuffer.array(), 0, byteBuffer.remaining(), rootNode);
	if (match.kind == EXACT_MATCH) {
		// NOTE(review): an exact match permanently flips containsSeparators
		// to false, which disables the separator-prefix fallback below for
		// all subsequent lookups on this Speller - confirm this is intended.
		containsSeparators = false;
		return true;
	}
	// Fallback: accept a prefix match when the entry continues with the
	// separator byte (i.e. "word<sep>annotation" style entries).
	return containsSeparators
		&& match.kind == SEQUENCE_IS_A_PREFIX
		&& byteBuffer.remaining() > 0
		&& fsa.getArc(match.node, dictionaryMetadata.getSeparator()) != 0;
}
/**
 * Get the frequency value for a word form.
 * It is taken from the first entry with this word form.
 * @param word the word to be tested
 * @return frequency value in range: 0..FREQ_RANGE-1 (0: less frequent).
 */
public int getFrequency(final CharSequence word) {
	// Frequencies are only available when the dictionary encodes them.
	if (!dictionaryMetadata.isFrequencyIncluded()) {
		return 0;
	}
	final byte separator = dictionaryMetadata.getSeparator();
	byteBuffer = charSequenceToBytes(word);
	final MatchResult match = matcher.match(matchResult, byteBuffer.array(), 0,
		byteBuffer.remaining(), rootNode);
	// Entries look like "word<sep>...<freq>", so the word must be a prefix.
	if (match.kind == SEQUENCE_IS_A_PREFIX) {
		final int arc = fsa.getArc(match.node, separator);
		if (arc != 0 && !fsa.isArcFinal(arc)) {
			// Walk to the first final state reachable after the separator.
			finalStatesIterator.restartFrom(fsa.getEndNode(arc));
			if (finalStatesIterator.hasNext()) {
				final ByteBuffer bb = finalStatesIterator.next();
				final byte[] ba = bb.array();
				final int bbSize = bb.remaining();
				//the last byte contains the frequency after a separator
				return ba[bbSize - 1] - FIRST_RANGE_CODE;
			}
		}
	}
	return 0;
}
/**
 * Propose suggestions for misspelled run-on words. This algorithm is inspired by
 * spell.cc in s_fsa package by Jan Daciuk.
 *
 * @param original The original misspelled word.
 * @return The list of suggested pairs, as space-concatenated strings.
 */
public List<String> replaceRunOnWords(final String original) {
	final List<String> candidates = new ArrayList<String>();
	if (!isInDictionary(Dictionary.convertText(original,
		dictionaryMetadata.getInputConversionPairs()).toString())
		&& dictionaryMetadata.isSupportingRunOnWords()) {
		final CharSequence ch = original;
		for (int i = 2; i < ch.length(); i++) {
			// chop from left to right
			final CharSequence firstCh = ch.subSequence(0, i);
			if (isInDictionary(firstCh) &&
				isInDictionary(ch.subSequence(i, ch.length()))) {
				// FIX: this condition was inverted - output conversions were
				// applied only when the pair list was EMPTY (a no-op) and
				// skipped when conversions were actually configured.
				if (dictionaryMetadata.getOutputConversionPairs().isEmpty()) {
					candidates.add(firstCh + " " + ch.subSequence(i, ch.length()));
				} else {
					candidates.add(
						Dictionary.convertText(firstCh + " " + ch.subSequence(i, ch.length()),
							dictionaryMetadata.getOutputConversionPairs()).toString()
					);
				}
			}
		}
	}
	return candidates;
}
/**
 * Find suggestions by using K. Oflazer's algorithm. See Jan Daciuk's s_fsa
 * package, spell.cc for further explanation.
 *
 * @param word
 *            The original misspelled word.
 * @return A list of suggested replacements.
 * @throws CharacterCodingException
 */
public List<String> findReplacements(final String w)
	throws CharacterCodingException {
	String word = w;
	if (!dictionaryMetadata.getInputConversionPairs().isEmpty()) {
		// normalize the input with the configured conversion pairs first
		word = Dictionary.convertText(w,
			dictionaryMetadata.getInputConversionPairs()).toString();
	}
	candidates.clear();
	if (word.length() > 0 && word.length() < MAX_WORD_LENGTH && !isInDictionary(word)) {
		List<String> wordsToCheck = new ArrayList<String>();
		// Phase 1: try cheap, common-error replacements (replacement pairs)
		// and case variants before falling back to the edit-distance search.
		if (dictionaryMetadata.getReplacementPairs() != null) {
			for (final String wordChecked : getAllReplacements(word, 0, 0)) {
				boolean found = false;
				if (isInDictionary(wordChecked)) {
					candidates.add(new CandidateData(wordChecked, 0));
					found = true;
				} else if (dictionaryMetadata.isConvertingCase()) {
					String lowerWord = wordChecked.toLowerCase(dictionaryMetadata.getLocale());
					String upperWord = wordChecked.toUpperCase(dictionaryMetadata.getLocale());
					if (isInDictionary(lowerWord)) {
						//add the word as it is in the dictionary, not mixed-case versions of it
						candidates.add(new CandidateData(lowerWord, 0));
						found = true;
					}
					if (isInDictionary(upperWord)) {
						candidates.add(new CandidateData(upperWord, 0));
						found = true;
					}
					if (lowerWord.length() > 1) {
						// also try the capitalized ("Firstupper") variant
						String firstupperWord = Character.toUpperCase(lowerWord.charAt(0))
							+ lowerWord.substring(1);
						if (isInDictionary(firstupperWord)) {
							candidates.add(new CandidateData(firstupperWord, 0));
							found = true;
						}
					}
				}
				if (!found) {
					// keep unresolved variants for the edit-distance search
					wordsToCheck.add(wordChecked);
				}
			}
		} else {
			wordsToCheck.add(word);
		}
		//If at least one candidate was found with the replacement pairs (which are usual errors),
		//probably there is no need for more candidates
		if (candidates.isEmpty()) {
			int i = 1;
			// Phase 2: bounded edit-distance traversal of the FSA.
			// NOTE(review): i starts at 1 and is incremented before the check,
			// so at most UPPER_SEARCH_LIMIT - 1 words are searched - confirm
			// whether the off-by-one bound is intentional.
			for (final String wordChecked : wordsToCheck) {
				i++;
				if (i > UPPER_SEARCH_LIMIT) { // for performance reasons, do not search too deeply
					break;
				}
				// reset the per-word traversal state used by findRepl/ed
				word_ff = wordChecked.toCharArray();
				wordLen = word_ff.length;
				candidate = new char[MAX_WORD_LENGTH];
				candLen = candidate.length;
				// effective edit distance shrinks for very short words
				e_d = wordLen <= editDistance ? wordLen - 1 : editDistance;
				charBuffer = BufferUtils.ensureCapacity(charBuffer, MAX_WORD_LENGTH);
				byteBuffer = BufferUtils.ensureCapacity(byteBuffer, MAX_WORD_LENGTH);
				charBuffer.clear();
				byteBuffer.clear();
				final byte[] prevBytes = new byte[0];
				findRepl(0, fsa.getRootNode(), prevBytes);
			}
		}
	}
	// Sort by edit distance, then deduplicate while preserving that order.
	Collections.sort(candidates);
	//FIXME: I'm an ugly hack
	//Use LinkedHashSet to avoid duplicates and keep the order
	final Set<String> candStringSet = new LinkedHashSet<String>();
	for (final CandidateData cd : candidates) {
		candStringSet.add(Dictionary.convertText(cd.getWord(),
			dictionaryMetadata.getOutputConversionPairs()).toString());
	}
	final List<String> candStringList = new ArrayList<String>(candStringSet.size());
	candStringList.addAll(candStringSet);
	return candStringList;
}
/**
 * Recursive FSA traversal collecting replacement candidates within the
 * effective edit distance {@code e_d}. {@code prevBytes} carries the bytes
 * of a partially decoded (multi-byte) character across recursion levels.
 *
 * @param depth index into {@code candidate} of the character being built
 * @param node FSA node to expand
 * @param prevBytes undecoded byte prefix of the current character
 */
private void findRepl(final int depth, final int node, final byte[] prevBytes)
	throws CharacterCodingException {
	char separatorChar = dictionaryMetadata.getSeparatorAsChar();
	int dist = 0;
	for (int arc = fsa.getFirstArc(node); arc != 0; arc = fsa.getNextArc(arc)) {
		// accumulate prevBytes + this arc's label and try to decode a char
		byteBuffer = BufferUtils.ensureCapacity(byteBuffer, prevBytes.length + 1);
		byteBuffer.clear();
		byteBuffer.put(prevBytes);
		byteBuffer.put(fsa.getArcLabel(arc));
		final int bufPos = byteBuffer.position();
		byteBuffer.flip();
		decoder.reset();
		final CoderResult c = decoder.decode(byteBuffer, charBuffer, true);
		if (c.isMalformed()) { // assume that only valid
			// encodings are there
			// incomplete multi-byte sequence: carry the bytes one level down
			final byte[] prev = new byte[bufPos];
			byteBuffer.position(0);
			byteBuffer.get(prev);
			if (!fsa.isArcTerminal(arc)) {
				findRepl(depth, fsa.getEndNode(arc), prev); // note: depth is not incremented
			}
			byteBuffer.clear();
		} else if (!c.isError()) { // unmappable characters are silently discarded
			charBuffer.flip();
			candidate[depth] = charBuffer.get();
			charBuffer.clear();
			byteBuffer.clear();
			// prune branches whose cut-off distance already exceeds e_d
			if (cuted(depth) <= e_d) {
				// emit a candidate when lengths are compatible, the edit
				// distance fits, and the arc ends a word (or precedes the
				// separator of an annotated entry)
				if (Math.abs(wordLen - 1 - depth) <= e_d
					&& (dist = ed(wordLen - 1, depth)) <= e_d
					&& (fsa.isArcFinal(arc) || isBeforeSeparator(arc))) {
					addCandidate(depth, dist);
				}
				// descend, but never across the separator character
				if (!fsa.isArcTerminal(arc)
					&& !(containsSeparators && candidate[depth] == separatorChar)) {
					findRepl(depth + 1, fsa.getEndNode(arc), new byte[0]);
				}
			}
		}
	}
	return;
}
/**
 * Tells whether the node reached through {@code arc} has an outgoing,
 * non-terminal arc labeled with the dictionary's separator byte — i.e.
 * whether the candidate built so far stands immediately before the
 * annotation separator. Always {@code false} when the dictionary uses no
 * separators.
 */
private boolean isBeforeSeparator(final int arc) {
    if (!containsSeparators) {
        return false;
    }
    final int separatorArc = fsa.getArc(fsa.getEndNode(arc), dictionaryMetadata.getSeparator());
    return separatorArc != 0 && !fsa.isArcTerminal(separatorArc);
}
/**
 * Records the first {@code depth + 1} characters of the working
 * {@code candidate} buffer as a spelling suggestion with the given edit
 * distance.
 *
 * @param depth index of the last valid character in {@code candidate}
 * @param dist  edit distance of the suggestion from the misspelled word
 */
private void addCandidate(final int depth, final int dist)
        throws CharacterCodingException {
    final String suggestion = new String(candidate, 0, depth + 1);
    candidates.add(new CandidateData(suggestion, dist));
}
/**
 * Calculates edit distance (one Damerau-Levenshtein cell update: equal
 * characters, transposition of the last two, or replace/delete/insert).
 * The computed cell is memoized in {@code hMatrix} for later calls.
 *
 * @param i length of first word (here: misspelled) - 1;
 * @param j length of second word (here: candidate) - 1.
 * @return Edit distance between the two words. Remarks: See Oflazer.
 */
public int ed(final int i, final int j) {
    int result;
    int a, b, c;
    if (areEqual(word_ff[i], candidate[j])) {
        // last characters are the same
        result = hMatrix.get(i, j);
    } else if (i > 0 && j > 0 && word_ff[i] == candidate[j - 1]
            && word_ff[i - 1] == candidate[j]) {
        // last two characters are transposed
        a = hMatrix.get(i - 1, j - 1); // transposition, e.g. ababab, ababba
        b = hMatrix.get(i + 1, j); // deletion, e.g. abab, aba
        c = hMatrix.get(i, j + 1); // insertion e.g. aba, abab
        result = 1 + min(a, b, c);
    } else {
        // otherwise: plain replace/delete/insert step
        a = hMatrix.get(i, j); // replacement, e.g. ababa, ababb
        b = hMatrix.get(i + 1, j); // deletion, e.g. ab, a
        c = hMatrix.get(i, j + 1); // insertion e.g. a, ab
        result = 1 + min(a, b, c);
    }
    // memoize this cell for subsequent ed()/cuted() calls
    hMatrix.set(i + 1, j + 1, result);
    return result;
}
// by Jaume Ortola
/**
 * Character equality used by the edit-distance computation: true for
 * identical characters, for pairs declared equivalent in the dictionary
 * metadata, or — when diacritics are ignored — for characters whose NFD
 * base characters match (optionally compared case-insensitively).
 */
private boolean areEqual(char x, char y) {
    if (x == y) {
        return true;
    }
    // Dictionary-declared equivalent character pairs.
    if (dictionaryMetadata.getEquivalentChars() != null
            && dictionaryMetadata.getEquivalentChars().containsKey(x)
            && dictionaryMetadata.getEquivalentChars().get(x).contains(y)) {
        return true;
    }
    if (!dictionaryMetadata.isIgnoringDiacritics()) {
        return false;
    }
    // Compare the NFD base characters, ignoring combining marks.
    String normX = Normalizer.normalize(Character.toString(x), Form.NFD);
    String normY = Normalizer.normalize(Character.toString(y), Form.NFD);
    if (dictionaryMetadata.isConvertingCase()) {
        normX = normX.toLowerCase(dictionaryMetadata.getLocale());
        normY = normY.toLowerCase(dictionaryMetadata.getLocale());
    }
    return normX.charAt(0) == normY.charAt(0);
}
/**
 * Calculates cut-off edit distance: the minimum of {@link #ed(int, int)}
 * over the window of misspelled-word positions within {@code e_d} of
 * {@code depth}.
 *
 * @param depth current length of candidates.
 * @return Cut-off edit distance. Remarks: See Oflazer.
 */
public int cuted(final int depth) {
    final int lo = Math.max(0, depth - e_d); // min chars from word to consider - 1
    final int hi = Math.min(wordLen - 1, depth + e_d); // max chars from word to consider - 1
    int minEd = e_d + 1; // what is to be computed
    for (int i = lo; i <= hi; i++) {
        minEd = Math.min(minEd, ed(i, depth));
    }
    return minEd;
}
/** @return the smallest of the three arguments. */
private static int min(final int a, final int b, final int c) {
    return Math.min(Math.min(a, b), c);
}
/**
 * Copy-paste of Character.isAlphabetic() (needed as we require only 1.6):
 * tests the Unicode general category of the code point against the set of
 * letter categories plus LETTER_NUMBER.
 *
 * @param codePoint The input character.
 * @return True if the character is a Unicode alphabetic character.
 */
static boolean isAlphabetic(int codePoint) {
    final int alphabeticCategories = (1 << Character.UPPERCASE_LETTER)
            | (1 << Character.LOWERCASE_LETTER)
            | (1 << Character.TITLECASE_LETTER)
            | (1 << Character.MODIFIER_LETTER)
            | (1 << Character.OTHER_LETTER)
            | (1 << Character.LETTER_NUMBER);
    return (alphabeticCategories & (1 << Character.getType(codePoint))) != 0;
}
/**
 * Checks whether a string contains a digit. Used for ignoring words with
 * numbers.
 *
 * @param s Word to be checked.
 * @return True if there is a digit inside the word.
 */
static boolean containsDigit(final String s) {
    for (final char ch : s.toCharArray()) {
        if (Character.isDigit(ch)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true if <code>str</code> is made up of all-uppercase characters
 * (ignoring characters for which no upper-/lowercase distinction exists).
 */
boolean isAllUppercase(final String str) {
    final String uppercased = str.toUpperCase(dictionaryMetadata.getLocale());
    return uppercased.equals(str);
}
/**
 * Tells whether the word is Capitalized: first character uppercase, the
 * remainder all-lowercase (per the dictionary locale). Empty or null input
 * yields false.
 *
 * @param str input string
 */
boolean isCapitalizedWord(final String str) {
    if (isEmpty(str) || !Character.isUpperCase(str.charAt(0))) {
        return false;
    }
    final String tail = str.substring(1);
    return tail.equals(tail.toLowerCase(dictionaryMetadata.getLocale()));
}
/**
 * Helper method to replace calls to "".equals().
 *
 * @param str
 *            String to check
 * @return true if string is empty OR null
 */
static boolean isEmpty(final String str) {
    return str == null || str.isEmpty();
}
/**
 * @param str input str
 * @return Returns true if str is MixedCase — i.e. not all-lowercase, not
 * all-uppercase, and not a Capitalized word.
 */
boolean isMixedCase(final String str) {
    final boolean allLowercase = str.equals(str.toLowerCase(dictionaryMetadata.getLocale()));
    return !allLowercase
            && !isAllUppercase(str)
            && !isCapitalizedWord(str);
}
/**
 * @param str input string
 * @return Returns true if str is CamelCase: first character uppercase,
 * second character (if any) lowercase, and at least one further uppercase
 * character (so neither all-uppercase, nor a Capitalized word, nor
 * all-lowercase). Empty or null input yields false.
 */
public boolean isCamelCase(final String str) {
    return !isEmpty(str)
            && !isAllUppercase(str)
            && !isCapitalizedWord(str)
            && Character.isUpperCase(str.charAt(0))
            // single-character input passes this clause vacuously
            && (str.length() < 2 || Character.isLowerCase(str.charAt(1)))
            && !str.equals(str.toLowerCase(dictionaryMetadata.getLocale()));
}
/**
 * Returns a list of all possible replacements of a given string, built by
 * applying the dictionary's replacement pairs (common misspelling
 * patterns). Each recursion level rewrites at most one occurrence site;
 * recursion stops after 6 levels to bound the search.
 *
 * @param str       the string to expand
 * @param fromIndex position from which to search for the next replacement key
 * @param level     current recursion depth
 * @return list of variants of {@code str} (always non-empty)
 */
public List<String> getAllReplacements(final String str, final int fromIndex, final int level) {
    List<String> replaced = new ArrayList<String>();
    if (level > 6) { // Stop searching at some point
        replaced.add(str);
        return replaced;
    }
    StringBuilder sb = new StringBuilder();
    sb.append(str);
    int index = MAX_WORD_LENGTH; // sentinel: "no key found"
    String key = "";
    int keyLength = 0;
    boolean found = false;
    // find first possible replacement after fromIndex position
    for (final String auxKey : dictionaryMetadata.getReplacementPairs().keySet()) {
        int auxIndex = sb.indexOf(auxKey, fromIndex);
        if (auxIndex > -1 && auxIndex <= index) {
            if (!(auxIndex == index && auxKey.length()<keyLength)) { //select the longest possible key
                index = auxIndex;
                key = auxKey;
                keyLength = auxKey.length();
            }
        }
    }
    if (index < MAX_WORD_LENGTH) {
        // try every replacement value registered for the selected key
        for (final String rep : dictionaryMetadata.getReplacementPairs().get(key)) {
            // avoid infinite loops when the replacement re-creates the key at
            // the same position
            if (rep.length() <= key.length() || sb.indexOf(rep) != index) {
                // start a branch without replacement (only once per key)
                if (!found) {
                    replaced.addAll(getAllReplacements(str, index + key.length(), level + 1));
                    found = true;
                }
                // start a branch with replacement
                sb.replace(index, index + key.length(), rep);
                replaced.addAll(getAllReplacements(sb.toString(), index + rep.length(), level + 1));
                // restore the working buffer for the next replacement value
                sb.setLength(0);
                sb.append(str);
            }
        }
    }
    if (!found) {
        // no applicable key: the string itself is the only variant
        replaced.add(sb.toString());
    }
    return replaced;
}
/**
 * Sets up the word and candidate. Used only to test the edit distance in
 * JUnit tests.
 *
 * @param word the first word
 * @param candidate the second word used for edit distance calculation
 */
void setWordAndCandidate(final String word, final String candidate) {
    this.word_ff = word.toCharArray();
    this.wordLen = this.word_ff.length;
    this.candidate = candidate.toCharArray();
    this.candLen = this.candidate.length;
    // effective edit distance can never reach the word's own length
    this.e_d = Math.min(this.wordLen - 1, editDistance);
}
/** @return length (in chars) of the misspelled word currently set. */
public final int getWordLen() {
    return wordLen;
}
/** @return length (in chars) of the candidate word currently set. */
public final int getCandLen() {
    return candLen;
}
/** @return the effective (possibly word-length-limited) edit distance. */
public final int getEffectiveED() {
    return e_d;
}
/**
 * Used to sort candidates according to edit distance, and possibly
 * according to their frequency in the future.
 */
private class CandidateData implements Comparable<CandidateData> {

    private final String word;
    private final int distance;

    CandidateData(final String word, final int distance) {
        this.word = word;
        // Fold the word's frequency class into the low "digits" of the sort
        // key: at equal edit distance, more frequent words compare smaller.
        this.distance = distance * FREQ_RANGES + FREQ_RANGES - getFrequency(word) - 1;
    }

    final String getWord() {
        return word;
    }

    final int getDistance() {
        return distance;
    }

    @Override
    public int compareTo(final CandidateData other) {
        // Assume no overflow: ascending order of the combined sort key.
        final int otherDistance = other.getDistance();
        if (otherDistance > this.distance) {
            return -1;
        }
        if (otherDistance == this.distance) {
            return 0;
        }
        return 1;
    }
}
}