gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package org.cidrz.webapp.dynasite.utils;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import javax.servlet.ServletException;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.beanutils.ConversionException;
import org.apache.commons.dbutils.QueryLoader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.cidrz.project.zeprs.valueobject.EncounterData;
import org.cidrz.project.zeprs.valueobject.EncounterDataArchive;
import org.cidrz.project.zeprs.valueobject.gen.PartographStatus;
import org.cidrz.project.zeprs.valueobject.gen.ReferralReasons;
import org.cidrz.project.zeprs.valueobject.report.PatientStatusReport;
import org.cidrz.webapp.dynasite.Constants;
import org.cidrz.webapp.dynasite.dao.CommentDAO;
import org.cidrz.webapp.dynasite.dao.EncounterArchiveDAO;
import org.cidrz.webapp.dynasite.dao.EncountersDAO;
import org.cidrz.webapp.dynasite.dao.OutcomeArchiveDAO;
import org.cidrz.webapp.dynasite.dao.OutcomeDAO;
import org.cidrz.webapp.dynasite.dao.PatientStatusDAO;
import org.cidrz.webapp.dynasite.dao.PregnancyDAO;
import org.cidrz.webapp.dynasite.dao.ProblemArchiveDAO;
import org.cidrz.webapp.dynasite.dao.ProblemDAO;
import org.cidrz.webapp.dynasite.dao.partograph.PartographDAO;
import org.cidrz.webapp.dynasite.exception.ObjectNotFoundException;
import org.cidrz.webapp.dynasite.rules.Outcome;
import org.cidrz.webapp.dynasite.rules.OutcomeArchive;
import org.cidrz.webapp.dynasite.rules.impl.OutcomeImpl;
import org.cidrz.webapp.dynasite.utils.sort.DateVisitOrderComparator;
import org.cidrz.webapp.dynasite.valueobject.Comment;
import org.cidrz.webapp.dynasite.valueobject.DynaSiteObjects;
import org.cidrz.webapp.dynasite.valueobject.Form;
import org.cidrz.webapp.dynasite.valueobject.Partograph;
import org.cidrz.webapp.dynasite.valueobject.Patient;
import org.cidrz.webapp.dynasite.valueobject.Pregnancy;
import org.cidrz.webapp.dynasite.valueobject.Problem;
import org.cidrz.webapp.dynasite.valueobject.ProblemArchive;
import org.cidrz.webapp.dynasite.valueobject.SessionPatient;
import org.cidrz.webapp.dynasite.valueobject.Site;
public class SyncUtils {
/**
* Commons Logging instance.
*/
private static Log log = LogFactory.getFactory().getInstance(SyncUtils.class);
/**
* Updates patient record from data imported from remote site.
* This is for a patient that already exists on the system.
* @param conn
* @param dupSP - sessionPatient of local patient
* @param patient - patient being imported
* @param duplicateId - id of local patient
* @param comments - messages from import process
* @param view
* @throws Exception
* @throws InvocationTargetException
* @throws ClassNotFoundException
* @throws InstantiationException
*/
/**
 * Updates a local patient record with data imported from a remote site.
 * This is for a patient that already exists on the system: new encounters,
 * outcomes, problems and comments are added, changed values are applied,
 * and deletions recorded remotely are replayed locally.
 *
 * @param conn        database connection
 * @param dupSP       sessionPatient of the local (duplicate) patient
 * @param patient     patient being imported
 * @param duplicateId id of the local patient record being updated
 * @param comments    buffer collecting messages from the import process
 * @param view        when true, only log what would happen; make no changes
 * @throws Exception on any unrecoverable import failure
 */
public static void updatePatientRecord(Connection conn, SessionPatient dupSP, Patient patient, Long duplicateId, StringBuffer comments, Boolean view) throws InstantiationException, ClassNotFoundException, InvocationTargetException, Exception {
    Boolean update = false;
    Boolean isDirty = false;
    Boolean thisIsDirty = false;
    Map queries = null;
    try {
        // Loaded for the (currently commented-out) PatientStatusDAO.update call below.
        queries = QueryLoader.instance().load("/" + Constants.SQL_PATIENT_PROPERTIES);
    } catch (IOException e) {
        e.printStackTrace();
    }
    int updatesNum = 0;
    comments.append("Patient ").append(patient.getDistrictPatientid()).append(" update: ");
    // Map of uuid -> created date for encounters already present locally.
    HashMap currentMap = new HashMap();
    List currentEncounters = EncountersDAO.getAll(conn, duplicateId);
    for (int i = 0; i < currentEncounters.size(); i++) {
        EncounterData encounterData = (EncounterData) currentEncounters.get(i);
        currentMap.put(encounterData.getUuid(), encounterData.getCreated());
    }
    int currentEncountersSize = currentEncounters.size();
    if (view) {
        log.debug("currentEncountersSize: " + currentEncountersSize);
    }
    int importedEncountersSize = 0;
    int newRecordsNum = 0;
    // Tracks imported-encounter id -> new local id for referral re-mapping.
    Map newIdMap = new WeakHashMap();
    List importPregnancies = patient.getPregnancyList();
    for (int i = 0; i < importPregnancies.size(); i++) {
        Pregnancy pregnancy = (Pregnancy) importPregnancies.get(i);
        List importEncounters = pregnancy.getEncounters();
        DateVisitOrderComparator doc = new DateVisitOrderComparator();
        Collections.sort(importEncounters, doc);
        importedEncountersSize = importEncounters.size();
        if (view) {
            log.debug("importedEncountersSize: " + importedEncountersSize);
        }
        for (int j = 0; j < importEncounters.size(); j++) {
            EncounterData encounterData = (EncounterData) importEncounters.get(j);
            String uuid = encounterData.getUuid();
            if (!currentMap.containsKey(uuid)) {
                // Encounter not present locally - import it.
                update = true;
                if (!view) {
                    SyncUtils.importEncounter(encounterData, duplicateId, dupSP.getCurrentPregnancyId(), conn, encounterData.getCreatedBy(), newIdMap);
                }
                newRecordsNum++;
            }
        }
        // Fix: importedEncountersSize was previously incremented by
        // importEncounters.size() a second time here, doubling the value
        // reported in the debug log.
        //todo: check lastModified date in partographStatus
        if (patient.getParentId() == null) {
            Long currentPregnancyId = dupSP.getCurrentPregnancyId();
            Partograph importedPartograph = pregnancy.getPartograph();
            try {
                Partograph currentPartograph = PartographDAO.getPartograph(conn, duplicateId, currentPregnancyId);
                if (view) {
                    if (currentPartograph.getPartographStatus() != null) {
                        String status = "currentPartograph last Mod: " + currentPartograph.getPartographStatus().getLastModified();
                        log.debug(status);
                    }
                    if (importedPartograph.getPartographStatus() != null) {
                        String status = "importedPartograph last Mod: " + importedPartograph.getPartographStatus().getLastModified();
                        log.debug(status);
                    }
                } else {
                    Object result = PartographDAO.update(conn, duplicateId, pregnancy.getId(), importedPartograph, currentPartograph, currentPregnancyId, comments);
                    if (result != null) {
                        // Partograph changed: touch patient status with the partograph's audit info.
                        update = Boolean.TRUE;
                        PartographStatus partoStatus = currentPartograph.getPartographStatus();
                        Long siteId = partoStatus.getSiteId();
                        String username = partoStatus.getLastModifiedBy();
                        PatientStatusDAO.touchPatientStatus(conn, null, username, siteId, duplicateId);
                    }
                }
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        thisIsDirty = processOutcomes(conn, dupSP, duplicateId, comments, view, pregnancy);
        // Fix: was "isDirty = thisIsDirty=true ? true:false;", which ASSIGNED
        // true to thisIsDirty and forced isDirty true on every pass.
        // Accumulate instead so earlier dirty results are never clobbered.
        isDirty = isDirty || thisIsDirty;
    }
    currentMap.clear();
    if (update) {
        comments.append(newRecordsNum).append(" new records. ");
    }
    try {
        updatesNum += XmlUtils.updateEncounterValues(conn, duplicateId, patient, dupSP, view);
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (updatesNum > 0) {
        comments.append(updatesNum).append(" values have been modified in patient record. ");
    }
    // Add new problems
    thisIsDirty = processProblems(conn, dupSP, patient, duplicateId, comments, view);
    isDirty = isDirty || thisIsDirty;
    // Add new comments
    thisIsDirty = processComments(conn, patient, duplicateId, comments, view);
    isDirty = isDirty || thisIsDirty;
    if (patient.getOutcomeDeletions() != null) {
        thisIsDirty = processOutcomeDeletions(conn, patient, comments, duplicateId, view);
        isDirty = isDirty || thisIsDirty;
    }
    if (patient.getProblemDeletions() != null) {
        thisIsDirty = processProblemDeletions(conn, patient, comments, duplicateId, view);
        isDirty = isDirty || thisIsDirty;
    }
    if (patient.getEncounterDeletions() != null) {
        thisIsDirty = processEncounterDeletions(conn, patient, comments, duplicateId, view);
        isDirty = isDirty || thisIsDirty;
    }
    if ((newIdMap.size() > 0) || (isDirty == true) || (updatesNum > 0)) {
        if (view) {
            log.debug("New id map: " + newIdMap.toString() + " isDirty: " + isDirty);
        } else {
            // Something changed: touch the patient status with the imported audit info.
            //PatientStatusDAO.update(patient, newIdMap, dupSP.getCurrentPregnancyId(), conn, queries, duplicateId);
            PatientStatusReport psr = patient.getPatientStatusreport();
            EncounterData enc = new EncounterData();
            enc.setLastModified(psr.getLastModified());
            enc.setLastModifiedBy(psr.getLastModifiedBy());
            enc.setSiteId(psr.getSiteId());
            PatientStatusDAO.touchPatientStatus(conn, enc, psr.getLastModifiedBy(), psr.getSiteId(), duplicateId);
        }
    } else {
        String message = " No new records, updated, or deletions.";
        comments.append(message);
        log.debug(message);
    }
}
/**
* Imports comments
* @param conn
* @param patient
* @param duplicateId
* @param comments
* @param view
* @return TODO
* @throws SQLException
* @throws ServletException
*/
/**
 * Imports comments attached to the imported patient that are not yet present locally.
 *
 * @param conn        database connection
 * @param patient     patient being imported
 * @param duplicateId id of the local (duplicate) patient record
 * @param comments    buffer collecting import-process messages
 * @param view        when true, only log counts; make no changes
 * @return true if comments were processed and the patient status may need touching
 * @throws SQLException
 * @throws ServletException
 */
private static Boolean processComments(Connection conn, Patient patient, Long duplicateId, StringBuffer comments,
        Boolean view) throws SQLException, ServletException {
    Boolean isDirty = false;
    int currentCommentsSize = 0;
    List<Comment> currentComments = CommentDAO.getAll(conn, duplicateId);
    if (currentComments != null) {
        currentCommentsSize = currentComments.size();
    }
    int importedCommentsSize = 0;
    List<Comment> importComments = patient.getComments();
    if (importComments != null) {
        importedCommentsSize = importComments.size();
    }
    // uuid lookup of comments already stored locally
    Map<String, String> currentCommentMap = new HashMap();
    if (currentComments != null) {
        for (Comment item : currentComments) {
            String uuid = item.getUuid();
            currentCommentMap.put(uuid, uuid);
        }
    }
    if (view) {
        log.debug(" currentCommentsSize: " + currentCommentsSize + " importedCommentsSize: " + importedCommentsSize);
    } else if (importComments != null) {
        // Fix: guard against a null import list, which previously threw an NPE
        // here (the null check above was only used for the size logging).
        for (int i = 0; i < importComments.size(); i++) {
            Comment item = (Comment) importComments.get(i);
            item.setImportCommentId(item.getId());
            String uuid = item.getUuid();
            if (currentCommentMap.get(uuid) == null) {
                // Comment not present locally - save it.
                XmlUtils.saveImportedComment(item, conn, comments);
            }
            // NOTE(review): dirty is flagged per imported comment even when it
            // already exists locally; preserved as-is since callers only use it
            // to decide whether to touch patient status.
            isDirty = true;
        }
    }
    currentCommentMap.clear();
    return isDirty;
}
/**
* Imports problems
* @param conn
* @param dupSP
* @param patient
* @param duplicateId
* @param comments
* @param view
* @return TODO
* @throws SQLException
* @throws ServletException
*/
/**
 * Imports problems attached to the imported patient that are not yet present locally.
 *
 * @param conn        database connection
 * @param dupSP       sessionPatient of the local patient (supplies current pregnancy id)
 * @param patient     patient being imported
 * @param duplicateId id of the local (duplicate) patient record
 * @param comments    buffer collecting import-process messages
 * @param view        when true, only log counts; make no changes
 * @return true if problems were processed and the patient status may need touching
 * @throws SQLException
 * @throws ServletException
 */
private static Boolean processProblems(Connection conn, SessionPatient dupSP,
        Patient patient, Long duplicateId, StringBuffer comments,
        Boolean view) throws SQLException, ServletException {
    Boolean isDirty = false;
    int currentProblemsSize = 0;
    List<Problem> currentProblems = ProblemDAO.getAll(conn, duplicateId);
    if (currentProblems != null) {
        currentProblemsSize = currentProblems.size();
    }
    int importedProblemsSize = 0;
    List<Problem> importProblems = patient.getProblems();
    if (importProblems != null) {
        importedProblemsSize = importProblems.size();
    }
    // uuid lookup of problems already stored locally
    Map<String, String> currentProblemMap = new HashMap();
    if (currentProblems != null) {
        for (Problem problem : currentProblems) {
            String problemUuid = problem.getUuid();
            currentProblemMap.put(problemUuid, problemUuid);
        }
    }
    if (view) {
        log.debug(" currentProblemsSize: " + currentProblemsSize + " importedProblemsSize: " + importedProblemsSize);
    } else if (importProblems != null) {
        // Fix: guard against a null import list, which previously threw an NPE
        // here (the null check above was only used for the size logging).
        for (int i = 0; i < importProblems.size(); i++) {
            Problem problem = (Problem) importProblems.get(i);
            // Re-point the imported problem at the local patient/pregnancy.
            problem.setImportProblemId(problem.getId());
            problem.setPatientId(duplicateId);
            problem.setPregnancyId(dupSP.getCurrentPregnancyId());
            String problemUuid = problem.getUuid();
            if (currentProblemMap.get(problemUuid) == null) {
                // Problem not present locally - save it.
                Long id = ProblemDAO.save(conn, problem, problem.getCreatedBy(), problem.getSiteId(), problem.getUuid(), problem.getPatientUuid(), problem.getPregnancyUuid());
                comments.append(" new problem: id ").append(id);
            }
            // NOTE(review): dirty is flagged per imported problem even when it
            // already exists locally; preserved as-is (see processComments).
            isDirty = true;
        }
    }
    currentProblemMap.clear();
    return isDirty;
}
/**
* Import new Outcomes
* @param conn
* @param dupSP
* @param duplicateId
* @param comments
* @param view
* @param pregnancy
* @return TODO
* @throws SQLException
* @throws ServletException
* @throws ObjectNotFoundException
*/
/**
 * Imports new Outcomes for the given pregnancy that are not yet present locally.
 * Aborts quietly (logging only) when no local pregnancy matches the imported
 * pregnancy's uuid.
 *
 * @param conn        database connection
 * @param dupSP       sessionPatient of the local patient
 * @param duplicateId id of the local (duplicate) patient record
 * @param comments    buffer collecting import-process messages
 * @param view        when true, only log counts; make no changes
 * @param pregnancy   imported pregnancy whose outcomes are processed
 * @return true if outcomes were processed and the patient status may need touching
 * @throws SQLException
 * @throws ServletException
 * @throws ObjectNotFoundException
 */
private static Boolean processOutcomes(Connection conn, SessionPatient dupSP, Long duplicateId, StringBuffer comments,
        Boolean view, Pregnancy pregnancy) throws SQLException, ServletException, ObjectNotFoundException {
    Boolean isDirty = false;
    String importedPregnancyUuid = pregnancy.getUuid();
    Pregnancy localPregnancy;
    try {
        localPregnancy = PregnancyDAO.getOne(conn, importedPregnancyUuid);
        // Add new outcomes
        int currentOutcomesSize = 0;
        List<OutcomeImpl> currentOutcomes = OutcomeDAO.getAll(conn, duplicateId, localPregnancy.getId());
        // uuid lookup of outcomes already stored locally
        Map currentOutcomeMap = new HashMap();
        if (currentOutcomes != null) {
            currentOutcomesSize = currentOutcomes.size();
            for (OutcomeImpl outcomeImpl : currentOutcomes) {
                String outcomeUuid = outcomeImpl.getOutcomeUuid();
                currentOutcomeMap.put(outcomeUuid, outcomeUuid);
            }
        }
        int importedOutcomesSize = 0;
        List importOutcomes = pregnancy.getActiveProblems();
        if (importOutcomes != null) {
            importedOutcomesSize = importOutcomes.size();
        }
        if (view) {
            log.debug(" currentOutcomesSize: " + currentOutcomesSize + " importedOutcomesSize: " + importedOutcomesSize);
        } else if (importOutcomes != null) {
            // Fix: guard against a null import list, which previously threw an
            // NPE here (the null check above was only used for the size logging).
            for (int j = 0; j < importOutcomes.size(); j++) {
                // The active-problems list mixes types; only Outcome subclasses are imported.
                Class superClazz = importOutcomes.get(j).getClass().getSuperclass();
                if (superClazz.equals(Outcome.class)) {
                    Outcome outcome = (Outcome) importOutcomes.get(j);
                    outcome.setImportOutcomeId(outcome.getId());
                    outcome.setPatientId(duplicateId);
                    outcome.setPregnancyId(dupSP.getCurrentPregnancyId());
                    String outcomeUuid = outcome.getOutcomeUuid();
                    if (currentOutcomeMap.get(outcomeUuid) == null) {
                        // We do not have this outcome locally.
                        try {
                            // Re-map the imported encounter id to the local encounter id.
                            EncounterData encounter = (EncounterData) EncountersDAO.getOneByImportedId(conn, outcome.getEncounterId(), duplicateId, outcome.getSiteId());
                            Long newId = encounter.getId();
                            outcome.setEncounterId(newId);
                            Long id = OutcomeDAO.save(conn, outcome, outcome.getCreatedBy(), outcome.getSiteId());
                            comments.append(" new outcome: id ").append(id);
                        } catch (ObjectNotFoundException e) {
                            // Referenced encounter not found locally; skip this outcome.
                            e.printStackTrace();
                        }
                    }
                    isDirty = true;
                }
            }
        }
        currentOutcomeMap.clear();
    } catch (ObjectNotFoundException e1) {
        // Fix: log message previously ran words together and misspelled "zeprs".
        log.debug("Aborting Outcome processing: LocalPregnancy not found for importedPregnancyUuid: " + importedPregnancyUuid + " Local patient id: " + duplicateId + " zeprs id: " + dupSP.getDistrictPatientid());
    }
    return isDirty;
}
/**
* Import a single encounter created at a remote site into a current patient's record
* Current patient is the patient in the master record - on the main ZEPRS server
* @param encounter
* @param patientId - current patient's id
* @param pregnancyId - current patient's preg. id.
* @param conn
* @param username
* @param newIdMap - HashMap to keep track of new id's
* @throws Exception
* @throws ServletException
* @throws SQLException
* @throws InvocationTargetException
* @throws ClassNotFoundException
* @throws InstantiationException
*/
/**
 * Import a single encounter created at a remote site into a current patient's record.
 * Current patient is the patient in the master record - on the main ZEPRS server.
 * Form 1 (patient registration, presumably - TODO confirm) is skipped entirely;
 * form 94 (referral reasons) first has its referring-encounter id re-mapped to
 * the local encounter id via the referral's encounter uuid.
 * @param encounter encounter to import
 * @param patientId - current patient's id
 * @param pregnancyId - current patient's preg. id.
 * @param conn database connection
 * @param username user recorded as the creator of the imported form
 * @param newIdMap - HashMap to keep track of new id's (imported id -> new local id)
 * @throws Exception
 * @throws ServletException
 * @throws SQLException
 * @throws InvocationTargetException
 * @throws ClassNotFoundException
 * @throws InstantiationException
 */
public static void importEncounter(EncounterData encounter, Long patientId, Long pregnancyId, Connection conn, String username, Map newIdMap) throws InstantiationException, ClassNotFoundException, InvocationTargetException, SQLException, ServletException, Exception {
Form formDef;
Long siteId;
EncounterData vo;
if (encounter.getFormId().intValue() != 1) {
if (encounter.getFormId().intValue() == 94) { // need to set encounter_id for referring encounter.
ReferralReasons referralReasons = (ReferralReasons) encounter;
//Long newRefEnc = (Long) newIdMap.get(referralReasons.getField1917());
Long newRefEnc;
try {
// use the encounter_uuid of the referral to set the encounter_id
String encounterUuid = referralReasons.getField2153();
newRefEnc = EncountersDAO.checkEncounterUuid(conn, encounterUuid);
referralReasons.setField1917(newRefEnc.intValue());
} catch (Exception e1) {
// Referral target not importable; log and leave the old field value in place.
log.debug("New referral encounter id not available. Patient: " + patientId + " encounterID: " + referralReasons.getId() + " encounter.getUuid(): " + encounter.getUuid() + " referralReasons.getField1917(): " + referralReasons.getField1917());
}
}
// Re-point the encounter at the local patient/pregnancy before saving.
encounter.setImportEncounterId(encounter.getId());
encounter.setPatientId(patientId);
encounter.setPregnancyId(pregnancyId);
formDef = (Form) DynaSiteObjects.getForms().get(encounter.getFormId());
siteId = encounter.getSiteId();
vo = PopulatePatientRecord.importForm(conn, encounter, formDef, siteId, username);
// Record the id mapping so later imports (e.g. referrals) can resolve it.
newIdMap.put(vo.getImportEncounterId(), vo.getId());
}
}
/**
* Loops through deleted items passed from the patient object imported from xml and deletes them
* @param conn
* @param patient
* @param comments
* @param duplicateId
* @param view
* @return isDirty - if patient status needs to be updated.
* @throws SQLException
* @throws ServletException
* @throws IOException
* @throws ObjectNotFoundException
*/
/**
 * Loops through deleted outcomes passed from the patient object imported from xml
 * and deletes the ones not already recorded in the local archive.
 * @param conn database connection
 * @param patient patient being imported
 * @param comments buffer collecting import-process messages
 * @param duplicateId id of the local (duplicate) patient record
 * @param view when true, only log counts; make no changes
 * @return isDirty - if patient status needs to be updated.
 * @throws SQLException
 * @throws ServletException
 * @throws IOException
 * @throws ObjectNotFoundException
 */
static Boolean processOutcomeDeletions(Connection conn, Patient patient, StringBuffer comments, Long duplicateId, Boolean view) throws SQLException, ServletException, ObjectNotFoundException, IOException {
Boolean isDirty = false;
List<OutcomeArchive> deletions = patient.getOutcomeDeletions();
List<OutcomeArchive> currentDeletions = OutcomeArchiveDAO.getAll(conn, duplicateId);
int importDelSize = 0;
int currentDelSize = 0;
if (deletions != null) {
importDelSize = deletions.size();
}
if (currentDeletions != null) {
currentDelSize = currentDeletions.size();
}
// uuid lookup of deletions already applied locally
Map<String,String> currentItemMap = new HashMap();
if (currentDeletions != null) {
// loop through the current items and pop them into the Map
for (OutcomeArchive item : currentDeletions) {
String uuid = item.getUuid();
currentItemMap.put(uuid, uuid);
}
}
if (view) {
log.debug("Current Outcome Dels: " + currentDelSize + " Import Outcome Dels" + importDelSize);
} else {
if (deletions != null) {
// diff may be negative if the local archive has more entries than the import.
int diff = importDelSize - currentDelSize;
comments.append(" Processing " + diff + " Outcome deletions. ");
for (int i = 0; i < importDelSize; i++) {
OutcomeArchive archive = (OutcomeArchive) deletions.get(i);
Site site = (Site) DynaSiteObjects.getClinicMap().get(archive.getSiteId());
String uuid = archive.getUuid();
if (currentItemMap.get(uuid) == null) {
// Deletion not yet applied locally - delete and flag dirty.
comments.append(" deleting outcome id: ").append(archive.getId());
PatientRecordUtils.deleteOutcome(conn, uuid, archive.getCreatedBy(), site, archive, patient, duplicateId);
isDirty = true;
}
}
}
}
currentItemMap.clear();
return isDirty;
}
/**
* Loops through deleted items passed from the patient object imported from xml and deletes them
* @param conn
* @param patient
* @param comments
* @param duplicateId
* @param view
* @return TODO
* @throws SQLException
* @throws ServletException
* @throws IOException
* @throws ObjectNotFoundException
*/
/**
 * Loops through deleted problems passed from the patient object imported from xml
 * and deletes the ones not already recorded in the local archive.
 *
 * @param conn        database connection
 * @param patient     patient being imported
 * @param comments    buffer collecting import-process messages
 * @param duplicateId id of the local (duplicate) patient record
 * @param view        when true, only log counts; make no changes
 * @return true if any problem was actually deleted (patient status needs touching)
 * @throws SQLException
 * @throws ServletException
 * @throws IOException
 * @throws ObjectNotFoundException
 */
static Boolean processProblemDeletions(Connection conn, Patient patient, StringBuffer comments, Long duplicateId, Boolean view) throws SQLException, ServletException, ObjectNotFoundException, IOException {
    Boolean isDirty = false;
    List<ProblemArchive> deletions = patient.getProblemDeletions();
    List<ProblemArchive> currentDeletions = ProblemArchiveDAO.getAll(conn, duplicateId);
    int importDelSize = 0;
    int currentDelSize = 0;
    if (deletions != null) {
        importDelSize = deletions.size();
    }
    if (currentDeletions != null) {
        currentDelSize = currentDeletions.size();
    }
    // uuid lookup of deletions already applied locally
    Map<String, String> currentItemMap = new HashMap();
    if (currentDeletions != null) {
        for (ProblemArchive item : currentDeletions) {
            String uuid = item.getUuid();
            currentItemMap.put(uuid, uuid);
        }
    }
    if (view) {
        log.debug("Current Problem Dels: " + currentDelSize + " Import Problem Dels" + importDelSize);
    } else {
        if (deletions != null) {
            // diff may be negative if the local archive has more entries than the import.
            int diff = importDelSize - currentDelSize;
            comments.append(" Processing " + diff + " Problem deletions. ");
            for (int i = 0; i < importDelSize; i++) {
                ProblemArchive archive = (ProblemArchive) deletions.get(i);
                Site site = (Site) DynaSiteObjects.getClinicMap().get(archive.getSiteId());
                String uuid = archive.getUuid();
                if (currentItemMap.get(uuid) == null) {
                    comments.append(" deleting Problem id: ").append(archive.getId());
                    PatientRecordUtils.deleteProblem(conn, uuid, archive.getCreatedBy(), site, archive, patient, duplicateId);
                    // Fix: flag dirty only when a deletion actually occurred,
                    // consistent with processOutcomeDeletions; previously this
                    // was set for every entry in the imported list.
                    isDirty = true;
                }
            }
        }
    }
    currentItemMap.clear();
    return isDirty;
}
/**
* Loop through encounter deletions list and process them.
* @param conn
* @param patient
* @param comments
* @param duplicateId
* @param view
* @return TODO
* @throws Exception
*/
/**
 * Loop through encounter deletions list and process them. An encounter that
 * cannot be found locally (already deleted or never imported) is archived to
 * an xml file and the deletions archive table instead.
 *
 * @param conn        database connection
 * @param patient     patient being imported
 * @param comments    buffer collecting import-process messages
 * @param duplicateId id of the local (duplicate) patient record
 * @param view        when true, only log counts; make no changes
 * @return true if any deletion was actually processed (patient status needs touching)
 * @throws Exception
 */
static Boolean processEncounterDeletions(Connection conn, Patient patient, StringBuffer comments, Long duplicateId, Boolean view) throws Exception {
    Boolean isDirty = false;
    List<EncounterDataArchive> deletions = patient.getEncounterDeletions();
    List<EncounterDataArchive> currentDeletions = EncounterArchiveDAO.getAll(conn, duplicateId);
    int importDelSize = 0;
    int currentDelSize = 0;
    if (deletions != null) {
        importDelSize = deletions.size();
    }
    if (currentDeletions != null) {
        currentDelSize = currentDeletions.size();
    }
    // uuid lookup of deletions already applied locally
    Map<String, String> currentItemMap = new HashMap();
    if (currentDeletions != null) {
        for (EncounterDataArchive item : currentDeletions) {
            String uuid = item.getUuid();
            currentItemMap.put(uuid, uuid);
        }
    }
    if (view) {
        log.debug("Current Encounter Dels: " + currentDelSize + " Import Encounter Dels" + importDelSize);
    } else {
        if (deletions != null) {
            // diff may be negative if the local archive has more entries than the import.
            int diff = importDelSize - currentDelSize;
            // Fix: message previously said "Problem deletions" (copy/paste error).
            comments.append(" Processing " + diff + " Encounter deletions. ");
            for (int i = 0; i < importDelSize; i++) {
                EncounterDataArchive encounterArchive = (EncounterDataArchive) deletions.get(i);
                Site site = (Site) DynaSiteObjects.getClinicMap().get(encounterArchive.getSiteId());
                String uuid = encounterArchive.getUuid();
                if (currentItemMap.get(uuid) == null) {
                    Long encounterId;
                    try {
                        encounterId = EncountersDAO.checkEncounterUuid(conn, uuid);
                        comments.append(" Deleting encounter id: ").append(encounterArchive.getId());
                        EncounterData vo = new EncounterData();
                        vo.setLastModified(encounterArchive.getLastModified());
                        vo.setLastModifiedBy(encounterArchive.getLastModifiedBy());
                        vo.setSiteId(encounterArchive.getSiteId());
                        PatientRecordUtils.deleteEncounter(conn, encounterArchive.getFormId(), encounterId, encounterArchive.getLastModifiedBy(), site, vo);
                    } catch (ObjectNotFoundException e) {
                        // Encounter not present locally: archive the deletion to xml + db instead.
                        EncounterData encounter = new EncounterData();
                        try {
                            BeanUtils.copyProperties(encounter, encounterArchive);
                        } catch (ConversionException e1) {
                            // Best-effort copy; unconvertible properties are skipped.
                            // log.debug("unable to copy value: " + e);
                        }
                        encounter.setUuid(uuid);
                        encounter.setPatientId(duplicateId);
                        String siteAbbrev = site.getAbbreviation();
                        String fileName = org.cidrz.webapp.dynasite.Constants.ARCHIVE_PATH + siteAbbrev + "/deletions/" + "enc" + uuid + ".xml";
                        XmlUtils.save(encounter, fileName);
                        Long id = EncounterArchiveDAO.saveArchive(conn, encounterArchive.getCreatedBy(), encounter, encounterArchive.getLastModified());
                    }
                    // Fix: flag dirty only when a deletion was actually processed,
                    // consistent with processOutcomeDeletions; previously this was
                    // set for every entry in the imported list.
                    isDirty = true;
                }
            }
        }
    }
    currentItemMap.clear();
    return isDirty;
}
}
| |
/*
* #%L
* =====================================================
* _____ _ ____ _ _ _ _
* |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | |
* | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| |
* | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ |
* |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_|
* \____/
*
* =====================================================
*
* Hochschule Hannover
* (University of Applied Sciences and Arts, Hannover)
* Faculty IV, Dept. of Computer Science
* Ricklinger Stadtweg 118, 30459 Hannover, Germany
*
* Email: trust@f4-i.fh-hannover.de
* Website: http://trust.f4.hs-hannover.de/
*
* This file is part of irongpm, version 0.3.1,
* implemented by the Trust@HsH research group at the Hochschule Hannover.
* %%
* Copyright (C) 2014 - 2016 Trust@HsH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package de.hshannover.f4.trust.irongpm.ifmap;
import java.util.ArrayList;
import java.util.List;
import org.jgrapht.graph.DefaultListenableGraph;
import org.jgrapht.graph.Multigraph;
import de.hshannover.f4.trust.irongpm.ifmap.interfaces.IfmapEdge;
import de.hshannover.f4.trust.irongpm.ifmap.interfaces.IfmapVertex;
import de.hshannover.f4.trust.irongpm.ifmap.interfaces.Metadata;
import de.hshannover.f4.trust.irongpm.listener.GraphVertexModifyEvent;
import de.hshannover.f4.trust.irongpm.listener.VertexStateListenable;
import de.hshannover.f4.trust.irongpm.listener.VertexStateListener;
/**
* Main datastructure class encapsulating a full graph using the JgraphT Listenable graph structure in combination with
* the IF-MAP datatypes.
*
* @author Leonard Renners
*
*/
public class IfmapGraphImpl extends DefaultListenableGraph<IfmapVertex, IfmapEdge> implements IfmapGraph,
VertexStateListenable<IfmapVertex> {
/** Serialization id. */
private static final long serialVersionUID = -3104953215717098366L;
// Timestamp of the last graph update (epoch millis, presumably - TODO confirm against callers).
private Long mLastUpdated;
// Listeners notified when a vertex's metadata changes (see fireVertexChanged).
private List<VertexStateListener<IfmapVertex>> mVertexStateListeners = new ArrayList<>();
/**
 * Constructor. Backs the listenable graph with a Multigraph (parallel edges allowed, no loops).
 */
public IfmapGraphImpl() {
super(new Multigraph<IfmapVertex, IfmapEdge>(IfmapEdge.class));
}
/**
 * Copy constructor. Creates a new instance on the basis of an existing.
 * Note: vertices and edges are shared with the old graph, not deep-copied;
 * listeners are NOT carried over.
 *
 * @param oldGraph
 *            The old graph
 */
public IfmapGraphImpl(IfmapGraphImpl oldGraph) {
super(new Multigraph<IfmapVertex, IfmapEdge>(oldGraph.getEdgeFactory()));
setLastUpdated(oldGraph.getLastUpdated());
for (IfmapVertex v : oldGraph.vertexSet()) {
addVertex(v);
}
for (IfmapEdge e : oldGraph.edgeSet()) {
addEdge(oldGraph.getEdgeSource(e), oldGraph.getEdgeTarget(e), e);
}
}
/**
 * Sets the timestamp of the last graph update.
 *
 * @param timestamp the new timestamp
 */
public void setLastUpdated(Long timestamp) {
mLastUpdated = timestamp;
}
@Override
public Long getLastUpdated() {
return mLastUpdated;
}
/**
 * Adds metadata to the graph's own instance of the given vertex and fires a
 * vertex-changed event on success. Returns false when the vertex is not in
 * the graph or the metadata was not added (e.g. already present).
 */
@Override
public boolean addMetadataToVertex(IfmapVertex vertex, Metadata m) {
if (!containsVertex(vertex)) {
return false;
}
// Locate the graph's own vertex instance equal to the argument; the stored
// instance may differ from the one the caller holds.
for (IfmapVertex v : vertexSet()) {
if (v.equals(vertex)) {
if (((IfmapVertexImpl) v).addMetadata(m)) {
fireVertexChanged(v);
return true;
}
}
}
return false;
}
/**
 * Removes metadata from the graph's own instance of the given vertex and
 * fires a vertex-changed event on success.
 */
@Override
public boolean removeMetadataFromVertex(IfmapVertex vertex, Metadata m) {
if (!containsVertex(vertex)) {
return false;
}
for (IfmapVertex v : vertexSet()) {
if (v.equals(vertex)) {
if (((IfmapVertexImpl) v).removeMetadata(m)) {
fireVertexChanged(v);
return true;
}
}
}
return false;
}
/**
 * Removes metadata from an existing vertex in the graph.
 *
 * @param vertex
 *            The vertex to remove metadata from
 * @param meta
 *            The metadata to remove
 * @return Whether the operation was successful (true if at least one entry was removed)
 */
public boolean removeMetadataFromVertex(IfmapVertex vertex, List<Metadata> meta) {
boolean changed = false;
if (!containsVertex(vertex)) {
return false;
}
for (Metadata m : meta) {
if (removeMetadataFromVertex(vertex, m)) {
changed = true;
}
}
return changed;
}
@Override
public void addVertexStateListener(VertexStateListener<IfmapVertex> l) {
mVertexStateListeners.add(l);
}
@Override
public void removeVertexStateListener(VertexStateListener<IfmapVertex> l) {
mVertexStateListeners.remove(l);
}
/**
 * Notifies all registered vertex-state listeners that the given vertex was modified.
 */
@Override
public void fireVertexChanged(IfmapVertex vertex) {
for (VertexStateListener<IfmapVertex> l : mVertexStateListeners) {
l.vertexChanged(new GraphVertexModifyEvent<IfmapVertex>(this, GraphVertexModifyEvent.VERTEX_MODIFIED,
vertex));
}
}
/**
 * Removes an edge, taking single-value metadata semantics into account: for
 * single-value metadata the stored edge equal to {@code toRemove} is removed,
 * even if it is a different instance.
 */
@Override
public boolean removeEdgeSensitive(IfmapEdge toRemove) {
if (!(toRemove instanceof IfmapEdgeImpl)) {
return false;
}
IfmapEdgeImpl rem = (IfmapEdgeImpl) toRemove;
if (rem.getMetadata().isSingleValue()) {
for (IfmapEdge e : edgeSet()) {
if (rem.equals(e)) {
return removeEdge(e);
}
}
}
return removeEdge(toRemove);
}
/**
 * Adds an edge, taking single-value metadata semantics into account: a
 * single-value edge replaces an existing equal edge whose non-IF-MAP state
 * differs (update-in-place).
 */
@Override
public boolean addEdgeSensitive(IfmapVertex v1, IfmapVertex v2, IfmapEdge toAdd) {
if (!(toAdd instanceof IfmapEdgeImpl)) {
return false;
}
IfmapEdgeImpl add = (IfmapEdgeImpl) toAdd;
if (!add.getMetadata().isSingleValue()) {
// Multi-value metadata: parallel edges are fine, just add.
return addEdge(v1, v2, toAdd);
} else {
for (IfmapEdge e : edgeSet()) {
if (add.equals(e)) {
if (!e.equalsNonIfmap(add)) {
// Equal edge with different non-IF-MAP state: replace it.
removeEdgeSensitive(e);
return addEdge(v1, v2, toAdd);
}
}
}
}
// NOTE(review): falls through to addEdge even when an identical edge already
// exists; the underlying Multigraph decides whether the add succeeds.
return addEdge(v1, v2, toAdd);
}
/**
 * Hash code based on the edges' non-IF-MAP hashes (order-independent sum),
 * or on the vertex set when the graph has no edges.
 */
@Override
public int hashCode() {
if (edgeSet().isEmpty()) {
return vertexSet().hashCode();
} else {
int result = 1;
for (IfmapEdge e : edgeSet()) {
result += e.hashCodeNonIfmap();
}
return result;
}
}
}
| |
/*
* Copyright (C) 2014 EU Edge LLC, http://euedge.com/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.euedge.glass.orientation;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.StringTokenizer;
import android.location.Location;
public class ReplayingOrientationManager implements OrientationManager {
private BufferedReader reader;
private Set<OnChangedListener> listeners;
private Location location;
private float heading;
private float pitch;
private float roll;
private boolean hasInterference;
private long nextOffset;
private char nextType;
private Location nextLocation;
private float nextHeading;
private float nextPitch;
private float nextRoll;
private boolean nextHasInterference;
boolean shouldRun;
Set<ReplayListener> replayListeners;
/**
 * Listener notified when a replay run finishes (end of file or stop()).
 * NOTE(review): the method name contains a typo ("Finsihed"); renaming would
 * break existing implementers, so it is left as-is.
 */
public interface ReplayListener {
public void onReplayFinsihed();
}
// Replays recorded sensor lines from the reader, sleeping until each line's
// recorded time offset is reached, until the file ends or shouldRun is cleared.
// NOTE(review): a java.lang.Thread cannot be restarted after it terminates, so
// this manager effectively supports a single replay run per instance.
private Thread readerThread = new Thread() {
long startTime;
long elapsedTime;
String line;
@Override
public void run() {
startTime = System.currentTimeMillis();
elapsedTime = 0;
while (shouldRun) {
try {
line = reader.readLine();
if (line == null || line.isEmpty()) {
// End of recording (or blank line): stop replaying.
shouldRun = false;
break;
}
readNextLine(line);
// Busy-wait (with sleep) until this sample's recorded offset is reached.
while (elapsedTime < nextOffset) {
try {
Thread.sleep(nextOffset - elapsedTime);
} catch (InterruptedException e) {
if (!shouldRun) {
// stop() was called: close the reader and notify before exiting.
reader.close();
notifyStop();
return;
}
}
elapsedTime = System.currentTimeMillis() - startTime;
}
updateNextDataAndNotifyListeners();
} catch (IOException e) {
// Read or close failure: end the replay loop.
shouldRun = false;
}
}
try {
reader.close();
} catch (IOException e) {
// Intentionally ignored: nothing to recover during shutdown.
}
notifyStop();
}
};
/**
 * Constructor. A replay file must be supplied via {@link #setFile(File)}
 * before {@link #start()} is called.
 */
public ReplayingOrientationManager() {
listeners = new LinkedHashSet<OnChangedListener>();
replayListeners = new LinkedHashSet<ReplayListener>();
}
/**
 * Constructor.
 *
 * @param file the recorded orientation data to replay
 * @throws FileNotFoundException if the file cannot be opened
 */
public ReplayingOrientationManager(File file) throws FileNotFoundException {
reader = new BufferedReader(new FileReader(file));
listeners = new LinkedHashSet<OnChangedListener>();
replayListeners = new LinkedHashSet<ReplayListener>();
}
/**
 * Sets the file to replay from, replacing any previously configured reader.
 * NOTE(review): a previously open reader is not closed here - confirm callers
 * do not rely on that.
 *
 * @param file the recorded orientation data to replay
 * @throws FileNotFoundException if the file cannot be opened
 */
public void setFile(File file) throws FileNotFoundException {
reader = new BufferedReader(new FileReader(file));
}
@Override
public void addOnChangedListener(OnChangedListener listener) {
listeners.add(listener);
}
@Override
public void removeOnChangedListener(OnChangedListener listener) {
listeners.remove(listener);
}
/**
 * Registers a listener to be notified when the replay finishes.
 */
public void addReplayListener(ReplayListener listener) {
replayListeners.add(listener);
}
/**
 * Unregisters a previously registered replay listener.
 */
public void removeReplayListener(ReplayListener listener) {
replayListeners.remove(listener);
}
@Override
public void start() {
if (!isReplaying()) {
shouldRun = true;
readerThread.start();
}
}
@Override
public void stop() {
shouldRun = false;
readerThread.interrupt();
}
@Override
public boolean isReplaying() {
return shouldRun && readerThread.isAlive();
}
@Override
public boolean hasInterference() {
return hasInterference;
}
@Override
public boolean hasLocation() {
return location != null;
}
@Override
public float getHeading() {
return heading;
}
@Override
public float getPitch() {
return pitch;
}
@Override
public float getRoll() {
return roll;
}
@Override
public Location getLocation() {
// TODO Auto-generated method stub
return null;
}
/**
* Notifies all listeners that the user's orientation has changed.
*/
private void notifyOrientationChanged() {
for (OnChangedListener listener : listeners) {
listener.onOrientationChanged(this);
}
}
/**
* Notifies all listeners that the user's location has changed.
*/
private void notifyLocationChanged() {
for (OnChangedListener listener : listeners) {
listener.onLocationChanged(this);
}
}
/**
* Notifies all listeners that the compass's accuracy has changed.
*/
private void notifyAccuracyChanged() {
for (OnChangedListener listener : listeners) {
listener.onAccuracyChanged(this);
}
}
private void notifyStop() {
for (ReplayListener listener : replayListeners) {
listener.onReplayFinsihed();
}
}
private void readNextLine(String line) {
StringTokenizer tok = new StringTokenizer(line, ",");
nextOffset = Long.parseLong(tok.nextToken());
nextType = tok.nextToken().charAt(0);
switch (nextType) {
case 'O':
nextHeading = Float.parseFloat(tok.nextToken());
nextPitch = Float.parseFloat(tok.nextToken());
nextRoll = Float.parseFloat(tok.nextToken());
break;
case 'A':
nextHasInterference = Boolean.parseBoolean(tok.nextToken());
break;
case 'L':
nextLocation = new Location("ReplayingOrientationManager");
nextLocation.setLatitude(Double.parseDouble(tok.nextToken()));
nextLocation.setLongitude(Double.parseDouble(tok.nextToken()));
nextLocation.setAltitude(Double.parseDouble(tok.nextToken()));
nextLocation.setBearing(Float.parseFloat(tok.nextToken()));
nextLocation.setSpeed(Float.parseFloat(tok.nextToken()));
nextLocation.setAccuracy(Float.parseFloat(tok.nextToken()));
break;
default:
}
}
private void updateNextDataAndNotifyListeners() {
switch (nextType) {
case 'O':
heading = nextHeading;
pitch = nextPitch;
roll = nextRoll;
notifyOrientationChanged();
break;
case 'A':
hasInterference = nextHasInterference;
notifyAccuracyChanged();
break;
case 'L':
location = nextLocation;
notifyLocationChanged();
break;
default:
}
}
@Override
public boolean isRecording() {
return false;
}
}
| |
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.internal.operators;
import static org.junit.Assert.fail;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.*;
import rx.Observable;
import rx.exceptions.MissingBackpressureException;
import rx.functions.Func1;
import rx.internal.util.RxRingBuffer;
import rx.observers.TestSubscriber;
import rx.subjects.PublishSubject;
/**
 * Tests for the {@code publish(Func1)} operator: the selector function receives a
 * multicast view of the source, and the observable it returns is what downstream
 * subscribers observe.
 */
public class OperatorPublishFunctionTest {

    /** concat(take(5), takeLast(5)) over 3 items emits them once and completes. */
    @Test
    public void concatTakeFirstLastCompletes() {
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
        Observable.range(1, 3).publish(new Func1<Observable<Integer>, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Observable<Integer> o) {
                return Observable.concat(o.take(5), o.takeLast(5));
            }
        }).subscribe(ts);
        ts.assertValues(1, 2, 3);
        ts.assertNoErrors();
        ts.assertCompleted();
    }

    /** Same shape under explicit backpressure: values arrive only as requested. */
    @Test
    public void concatTakeFirstLastBackpressureCompletes() {
        TestSubscriber<Integer> ts = TestSubscriber.create(0L);
        Observable.range(1, 6).publish(new Func1<Observable<Integer>, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Observable<Integer> o) {
                return Observable.concat(o.take(5), o.takeLast(5));
            }
        }).subscribe(ts);
        ts.assertNoValues();
        ts.assertNoErrors();
        ts.assertNotCompleted();
        ts.requestMore(5);
        ts.assertValues(1, 2, 3, 4, 5);
        ts.assertNoErrors();
        ts.assertNotCompleted();
        ts.requestMore(5);
        ts.assertValues(1, 2, 3, 4, 5, 6);
        ts.assertNoErrors();
        ts.assertCompleted();
    }

    /** Unsubscribing downstream must detach the operator from the source subject. */
    @Test
    public void canBeCancelled() {
        TestSubscriber<Integer> ts = TestSubscriber.create();
        PublishSubject<Integer> ps = PublishSubject.create();
        ps.publish(new Func1<Observable<Integer>, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Observable<Integer> o) {
                return Observable.concat(o.take(5), o.takeLast(5));
            }
        }).subscribe(ts);
        ps.onNext(1);
        ps.onNext(2);
        ts.assertValues(1, 2);
        ts.assertNoErrors();
        ts.assertNotCompleted();
        ts.unsubscribe();
        Assert.assertFalse("Source has subscribers?", ps.hasObservers());
    }

    /** A non-positive prefetch is rejected eagerly with a descriptive message. */
    @Test
    public void invalidPrefetch() {
        try {
            new OnSubscribePublishMulticast<Integer>(-99, false);
            fail("Didn't throw IllegalArgumentException");
        } catch (IllegalArgumentException ex) {
            Assert.assertEquals("prefetch > 0 required but it was -99", ex.getMessage());
        }
    }

    /** take(1) completes downstream and unsubscribes from the source. */
    @Test
    public void takeCompletes() {
        TestSubscriber<Integer> ts = TestSubscriber.create();
        PublishSubject<Integer> ps = PublishSubject.create();
        ps.publish(new Func1<Observable<Integer>, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Observable<Integer> o) {
                return o.take(1);
            }
        }).subscribe(ts);
        ps.onNext(1);
        ts.assertValues(1);
        ts.assertNoErrors();
        ts.assertCompleted();
        Assert.assertFalse("Source has subscribers?", ps.hasObservers());
    }

    /** onStart() of the downstream subscriber is invoked exactly once. */
    @Test
    public void oneStartOnly() {
        final AtomicInteger startCount = new AtomicInteger();
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
            @Override
            public void onStart() {
                startCount.incrementAndGet();
            }
        };
        PublishSubject<Integer> ps = PublishSubject.create();
        ps.publish(new Func1<Observable<Integer>, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Observable<Integer> o) {
                return o.take(1);
            }
        }).subscribe(ts);
        Assert.assertEquals(1, startCount.get());
    }

    /** Same as takeCompletes but via unsafeSubscribe (no SafeSubscriber wrapping). */
    @Test
    public void takeCompletesUnsafe() {
        TestSubscriber<Integer> ts = TestSubscriber.create();
        PublishSubject<Integer> ps = PublishSubject.create();
        ps.publish(new Func1<Observable<Integer>, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Observable<Integer> o) {
                return o.take(1);
            }
        }).unsafeSubscribe(ts);
        ps.onNext(1);
        ts.assertValues(1);
        ts.assertNoErrors();
        ts.assertCompleted();
        Assert.assertFalse("Source has subscribers?", ps.hasObservers());
    }

    /** The identity selector passes values and completion straight through. */
    @Test
    public void directCompletesUnsafe() {
        TestSubscriber<Integer> ts = TestSubscriber.create();
        PublishSubject<Integer> ps = PublishSubject.create();
        ps.publish(new Func1<Observable<Integer>, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Observable<Integer> o) {
                return o;
            }
        }).unsafeSubscribe(ts);
        ps.onNext(1);
        ps.onCompleted();
        ts.assertValues(1);
        ts.assertNoErrors();
        ts.assertCompleted();
        Assert.assertFalse("Source has subscribers?", ps.hasObservers());
    }

    /**
     * Overflowing the internal queue while nothing is requested fails with
     * MissingBackpressureException and detaches from the source.
     * (Renamed from the typo'd "oveflowMissingBackpressureException"; JUnit
     * discovers tests via the annotation, so the rename is safe.)
     */
    @Test
    public void overflowMissingBackpressureException() {
        TestSubscriber<Integer> ts = TestSubscriber.create(0);
        PublishSubject<Integer> ps = PublishSubject.create();
        ps.publish(new Func1<Observable<Integer>, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Observable<Integer> o) {
                return o;
            }
        }).unsafeSubscribe(ts);
        for (int i = 0; i < RxRingBuffer.SIZE * 2; i++) {
            ps.onNext(i);
        }
        ts.assertNoValues();
        ts.assertError(MissingBackpressureException.class);
        ts.assertNotCompleted();
        Assert.assertEquals("Queue full?!", ts.getOnErrorEvents().get(0).getMessage());
        Assert.assertFalse("Source has subscribers?", ps.hasObservers());
    }

    /**
     * With delayError=true the already-buffered values are delivered before the
     * overflow error is emitted. (Renamed from the typo'd "oveflow..." form.)
     */
    @Test
    public void overflowMissingBackpressureExceptionDelayed() {
        TestSubscriber<Integer> ts = TestSubscriber.create(0);
        PublishSubject<Integer> ps = PublishSubject.create();
        OperatorPublish.create(ps, new Func1<Observable<Integer>, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Observable<Integer> o) {
                return o;
            }
        }, true).unsafeSubscribe(ts);
        for (int i = 0; i < RxRingBuffer.SIZE * 2; i++) {
            ps.onNext(i);
        }
        ts.requestMore(RxRingBuffer.SIZE);
        ts.assertValueCount(RxRingBuffer.SIZE);
        ts.assertError(MissingBackpressureException.class);
        ts.assertNotCompleted();
        Assert.assertEquals("Queue full?!", ts.getOnErrorEvents().get(0).getMessage());
        Assert.assertFalse("Source has subscribers?", ps.hasObservers());
    }
}
| |
/*
* Copyright (c) 2016 LARUS Business Automation [http://www.larus-ba.it]
* <p>
* This file is part of the "LARUS Integration Framework for Neo4j".
* <p>
* The "LARUS Integration Framework for Neo4j" is licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* <p>
* Created on 18/02/16
*/
package org.neo4j.jdbc.bolt;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.neo4j.driver.internal.types.InternalTypeSystem;
import org.neo4j.driver.Result;
import org.neo4j.driver.types.Type;
import org.neo4j.jdbc.bolt.data.ResultSetData;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.assertEquals;
/**
* @author AgileLARUS
* @since 3.0.0
*/
/**
 * Tests for {@code BoltNeo4jResultSetMetaData}: column counts, names/labels,
 * schema/catalog defaults, flattened node columns and JDBC type mapping.
 *
 * @author AgileLARUS
 * @since 3.0.0
 */
public class BoltNeo4jResultSetMetaDataTest {

    @Rule public ExpectedException expectedEx = ExpectedException.none();

    @BeforeClass public static void initialize() {
        ResultSetData.initialize();
    }

    /*------------------------------*/
    /*        getColumnCount        */
    /*------------------------------*/

    @Test public void getColumnsCountShouldReturnCorrectNumberEmpty() throws SQLException {
        Result resultIterator = ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_EMPTY, ResultSetData.RECORD_LIST_EMPTY);
        // Collections.<Type>emptyList() instead of raw Collections.EMPTY_LIST avoids the unchecked warning.
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), resultIterator.keys());
        assertEquals(0, resultSet.getColumnCount());
    }

    @Test public void getColumnsCountShouldReturnCorrectNumberMoreElements() throws SQLException {
        Result resultCursor = ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_MORE_ELEMENTS, ResultSetData.RECORD_LIST_MORE_ELEMENTS);
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), resultCursor.keys());
        assertEquals(2, resultSet.getColumnCount());
    }

    /*------------------------------*/
    /*        getColumnName         */
    /*------------------------------*/

    /** Column indexes are 1-based, per the JDBC ResultSetMetaData contract. */
    @Test public void getColumnNameShouldReturnCorrectColumnName() throws SQLException {
        Result resultCursor = ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_MORE_ELEMENTS, ResultSetData.RECORD_LIST_MORE_ELEMENTS);
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), resultCursor.keys());
        assertEquals("columnA", resultSet.getColumnName(1));
    }

    @Test public void getColumnNameShouldThrowExceptionWhenEmptyCursor() throws SQLException {
        expectedEx.expect(SQLException.class);
        Result resultCursor = ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_EMPTY, ResultSetData.RECORD_LIST_EMPTY);
        ResultSetMetaData resultSetMetaData = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), resultCursor.keys());
        resultSetMetaData.getColumnName(1);
    }

    @Test public void getColumnNameShouldThrowExceptionWhenColumnOutOfRange() throws SQLException {
        expectedEx.expect(SQLException.class);
        Result resultCursor = ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_EMPTY, ResultSetData.RECORD_LIST_EMPTY);
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), resultCursor.keys());
        resultSet.getColumnName(99);
    }

    @Test public void getColumnNameShouldThrowExceptionIfCursorNull() throws SQLException {
        expectedEx.expect(SQLException.class);
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), Collections.<String>emptyList());
        resultSet.getColumnName(1);
    }

    /*------------------------------*/
    /*        getColumnLabel        */
    /*------------------------------*/

    @Test public void getColumnLabelShouldReturnCorrectColumnName() throws SQLException {
        Result resultCursor = ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_MORE_ELEMENTS, ResultSetData.RECORD_LIST_MORE_ELEMENTS);
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), resultCursor.keys());
        assertEquals("columnA", resultSet.getColumnLabel(1));
    }

    @Test public void getColumnLabelShouldThrowExceptionWhenEmptyCursor() throws SQLException {
        expectedEx.expect(SQLException.class);
        Result resultCursor = ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_EMPTY, ResultSetData.RECORD_LIST_EMPTY);
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), resultCursor.keys());
        resultSet.getColumnLabel(1);
    }

    @Test public void getColumnLabelShouldThrowExceptionWhenColumnOutOfRange() throws SQLException {
        expectedEx.expect(SQLException.class);
        Result resultCursor = ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_EMPTY, ResultSetData.RECORD_LIST_EMPTY);
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), resultCursor.keys());
        resultSet.getColumnLabel(99);
    }

    @Test public void getColumnLabelShouldThrowExceptionIfCursorNull() throws SQLException {
        expectedEx.expect(SQLException.class);
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), Collections.<String>emptyList());
        resultSet.getColumnLabel(1);
    }

    /*------------------------------*/
    /*        getSchemaName         */
    /*------------------------------*/

    @Test public void getSchemaNameShouldReturnDefaultValue() throws SQLException {
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), Collections.<String>emptyList());
        assertEquals("", resultSet.getSchemaName(1));
    }

    /*------------------------------*/
    /*        getCatalogName        */
    /*------------------------------*/

    @Test public void getCatalogNameShouldReturnEmptyString() throws SQLException {
        ResultSetMetaData resultSet = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(), Collections.<String>emptyList());
        assertEquals("", resultSet.getCatalogName(1));
    }

    /*------------------------------*/
    /*          flattening          */
    /*------------------------------*/

    /** A flattened node exposes the synthetic "node.*" columns in declaration order. */
    @Test public void flatteningTestWorking() throws SQLException {
        // NOTE(review): the built cursor is not used by the assertions below;
        // the call is kept in case buildResultCursor primes shared fixture state.
        ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_MORE_ELEMENTS_NODES, ResultSetData.RECORD_LIST_MORE_ELEMENTS_NODES);
        ResultSetMetaData rsm = BoltNeo4jResultSetMetaData.newInstance(false, Collections.<Type>emptyList(),
                Arrays.asList("node", "node.id", "node.label", "node.property2", "node.property1"));
        assertEquals(5, rsm.getColumnCount());
        assertEquals("node", rsm.getColumnLabel(1));
        assertEquals("node.id", rsm.getColumnLabel(2));
        assertEquals("node.label", rsm.getColumnLabel(3));
        assertEquals("node.property2", rsm.getColumnLabel(4));
        assertEquals("node.property1", rsm.getColumnLabel(5));
    }

    /** Maps each driver type to the Java class reported by getColumnClassName(). */
    @Test public void getColumnClassNameTest() throws SQLException {
        List<Type> types = Arrays.asList(
                InternalTypeSystem.TYPE_SYSTEM.STRING(),
                InternalTypeSystem.TYPE_SYSTEM.INTEGER(),
                InternalTypeSystem.TYPE_SYSTEM.BOOLEAN(),
                InternalTypeSystem.TYPE_SYSTEM.FLOAT(),
                InternalTypeSystem.TYPE_SYSTEM.NODE(),
                InternalTypeSystem.TYPE_SYSTEM.RELATIONSHIP(),
                InternalTypeSystem.TYPE_SYSTEM.PATH(),
                InternalTypeSystem.TYPE_SYSTEM.MAP(),
                InternalTypeSystem.TYPE_SYSTEM.ANY(),
                InternalTypeSystem.TYPE_SYSTEM.LIST(),
                InternalTypeSystem.TYPE_SYSTEM.NUMBER(),
                InternalTypeSystem.TYPE_SYSTEM.NULL()
        );
        List<String> cols = Arrays.asList(
                "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L"
        );
        ResultSetMetaData rsm = BoltNeo4jResultSetMetaData.newInstance(false, types, cols);
        assertEquals("java.lang.String", rsm.getColumnClassName(1));
        assertEquals("java.lang.Long", rsm.getColumnClassName(2));
        assertEquals("java.lang.Boolean", rsm.getColumnClassName(3));
        assertEquals("java.lang.Double", rsm.getColumnClassName(4));
        assertEquals("java.lang.Object", rsm.getColumnClassName(5));
        assertEquals("java.lang.Object", rsm.getColumnClassName(6));
        assertEquals("java.lang.Object", rsm.getColumnClassName(7));
        assertEquals("java.util.Map", rsm.getColumnClassName(8));
        assertEquals("java.lang.Object", rsm.getColumnClassName(9));
        assertEquals("java.sql.Array", rsm.getColumnClassName(10));
        assertEquals("java.lang.Double", rsm.getColumnClassName(11));
        assertEquals(null, rsm.getColumnClassName(12));
    }

    /** Maps each driver type to the java.sql.Types constant reported by getColumnType(). */
    @Test public void getColumnTypeTest() throws SQLException {
        List<Type> types = Arrays.asList(
                InternalTypeSystem.TYPE_SYSTEM.STRING(),
                InternalTypeSystem.TYPE_SYSTEM.INTEGER(),
                InternalTypeSystem.TYPE_SYSTEM.BOOLEAN(),
                InternalTypeSystem.TYPE_SYSTEM.FLOAT(),
                InternalTypeSystem.TYPE_SYSTEM.NODE(),
                InternalTypeSystem.TYPE_SYSTEM.RELATIONSHIP(),
                InternalTypeSystem.TYPE_SYSTEM.PATH(),
                InternalTypeSystem.TYPE_SYSTEM.MAP(),
                InternalTypeSystem.TYPE_SYSTEM.ANY(),
                InternalTypeSystem.TYPE_SYSTEM.LIST(),
                InternalTypeSystem.TYPE_SYSTEM.NUMBER(),
                InternalTypeSystem.TYPE_SYSTEM.NULL()
        );
        List<String> cols = Arrays.asList(
                "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L"
        );
        ResultSetMetaData rsm = BoltNeo4jResultSetMetaData.newInstance(false, types, cols);
        assertEquals(Types.VARCHAR, rsm.getColumnType(1));
        assertEquals(Types.INTEGER, rsm.getColumnType(2));
        assertEquals(Types.BOOLEAN, rsm.getColumnType(3));
        assertEquals(Types.FLOAT, rsm.getColumnType(4));
        assertEquals(Types.JAVA_OBJECT, rsm.getColumnType(5));
        assertEquals(Types.JAVA_OBJECT, rsm.getColumnType(6));
        assertEquals(Types.JAVA_OBJECT, rsm.getColumnType(7));
        assertEquals(Types.JAVA_OBJECT, rsm.getColumnType(8));
        assertEquals(Types.JAVA_OBJECT, rsm.getColumnType(9));
        assertEquals(Types.ARRAY, rsm.getColumnType(10));
        assertEquals(Types.FLOAT, rsm.getColumnType(11));
        assertEquals(Types.NULL, rsm.getColumnType(12));
    }

    /** Exercises the type-name accessors for a NODE column. */
    @Test public void getColumnTypeNameTest() throws SQLException {
        // NOTE(review): the built cursor is unused; kept for possible fixture side effects.
        ResultSetData.buildResultCursor(ResultSetData.KEYS_RECORD_LIST_MORE_ELEMENTS_NODES, ResultSetData.RECORD_LIST_MORE_ELEMENTS_NODES);
        ResultSetMetaData rsm = BoltNeo4jResultSetMetaData.newInstance(false, Arrays.asList(InternalTypeSystem.TYPE_SYSTEM.NODE()),
                Arrays.asList("node"));
        assertEquals(1, rsm.getColumnCount());
        assertEquals("node", rsm.getColumnLabel(1));
        // TODO(review): these print diagnostics but assert nothing about the
        // class name / name / type / type name — add expected values.
        System.err.println(rsm.getColumnClassName(1));
        System.err.println(rsm.getColumnName(1));
        System.err.println(rsm.getColumnType(1));
        System.err.println(rsm.getColumnTypeName(1));
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.xml.*;
import com.intellij.testFramework.LightCodeInsightTestCase;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.LocalTimeCounter;
import com.intellij.xml.util.XmlTagUtil;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Arrays;
/**
* @author peter
*/
@SuppressWarnings({"ConstantConditions", "EmptyCatchBlock"})
public class XmlTagTest extends LightCodeInsightTestCase {
/** Wraps {@code value} in a {@code <foo>} element and returns the parsed tag. */
private static XmlTag createTag(String value) throws IncorrectOperationException {
    final XmlElementFactory factory = XmlElementFactory.getInstance(getProject());
    return factory.createTagFromText("<foo>" + value + "</foo>");
}
/** The text value of a tag's body is returned verbatim. */
public void testGetTextValue() throws Throwable {
    final XmlTag tag = createTag("bar");
    assertEquals("bar", tag.getValue().getText());
}
/** Every known character-entity reference in tag text resolves to its character. */
public void testCharRefs() throws Throwable {
    for (final String entityName : XmlTagUtil.getCharacterEntityNames()) {
        final XmlTag tag = createTag("foo&" + entityName + ";bar");
        final String expected = "foo" + XmlTagUtil.getCharacterByEntityName(entityName) + "bar";
        assertEquals(expected, tag.getValue().getTrimmedText());
    }
}
/** Both the default and a prefixed xmlns declaration are reported as local namespaces. */
public void testLocalNSDeclarations() throws Throwable {
    final XmlTag tag = XmlElementFactory.getInstance(getProject()).createTagFromText("<foo xmlns='aaa' xmlns:a='bbbb'/>");
    final Object[] prefixes = ArrayUtil.toObjectArray(tag.getLocalNamespaceDeclarations().keySet());
    Arrays.sort(prefixes);
    assertEquals(2, prefixes.length);
    // Sorted order: the default (empty) prefix comes first.
    assertEquals("", prefixes[0]);
    assertEquals("a", prefixes[1]);
}
/** CDATA sections are unescaped when reading the tag's trimmed text. */
public void testCDATA() throws Throwable {
    XmlTag tag = createTag("foo<![CDATA[<>&'\"]]>bar");
    assertEquals("foo<>&'\"bar", tag.getValue().getTrimmedText());
}
/** An unquoted attribute value does not leak into the tag's text value. */
public void testWhitespacesInAttributes() throws Throwable {
    XmlTag tag = XmlElementFactory.getInstance(getProject()).createTagFromText("<a c=d>b</a>");
    assertEquals("b", tag.getValue().getText());
}
/** A child tag created with a namespace keeps it both before and after being added. */
public void testCreateChildTag() throws Throwable {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<a xmlns=\"aNamespace\"/>");
    final XmlTag childTag = rootTag.createChildTag("b", "bNamespace", null, true);
    assertEquals("bNamespace", childTag.getNamespace());
    // PSI mutation must run inside a write command action.
    WriteCommandAction.runWriteCommandAction(null, new Runnable() {
        @Override
        public void run() {
            XmlTag beanTag = (XmlTag)rootTag.add(childTag);
            assertEquals("bNamespace", beanTag.getNamespace());
        }
    });
}
/** Deleting a sub-tag removes it from the parent's sub-tag list. */
public void testDeleteTag() throws Throwable {
    XmlTag aTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<a><b/> </a>");
    final XmlTag bTag = aTag.findFirstSubTag("b");
    // PSI mutation must run inside a write command action.
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            bTag.delete();
        }
    });
    assertEquals(0, aTag.getSubTags().length);
}
/** replace() swaps a sub-tag for a new one and returns the inserted element. */
public void testReplaceTag() throws Throwable {
    final XmlTag aTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<a><b/></a>");
    final XmlTag bTag = aTag.findFirstSubTag("b");
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            PsiElement cTag = bTag.replace(XmlElementFactory.getInstance(getProject()).createTagFromText("<c/>"));
            assertEquals(1, aTag.getSubTags().length);
            assertEquals(cTag, aTag.getSubTags()[0]);
        }
    });
}
/** Text added via addAfter() ends up in the same file as its target tag. */
public void testAddText() throws Exception{
    final XmlElementFactory elementFactory = XmlElementFactory.getInstance(getProject());
    final XmlTag aTag = elementFactory.createTagFromText("<a>1</a>");
    final XmlText displayText = elementFactory.createDisplayText("2");
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            final PsiElement psiElement = aTag.addAfter(displayText, aTag.getValue().getChildren()[0]);
            assertEquals(psiElement.getContainingFile(), aTag.getContainingFile());
        }
    });
}
/** Setting a tag's value to pure whitespace must not throw. */
public void testWhitespaceInsideTag() throws Exception {
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            XmlElementFactory.getInstance(getProject()).createTagFromText("<p/>").getValue().setText("\n");
        }
    });
}
/** setAttribute() on the root tag of an XHTML file adds exactly one attribute. */
public void testSetAttribute_ForXhtml() throws Exception {
    XmlFile xhtmlFile = (XmlFile)PsiFileFactory.getInstance(getProject()).createFileFromText("dummy.xhtml", "<html/>");
    final XmlTag rootTag = xhtmlFile.getDocument().getRootTag();
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            rootTag.setAttribute("foo", "bar");
        }
    });
    assertEquals(1, rootTag.getAttributes().length);
    assertEquals("bar", rootTag.getAttributeValue("foo"));
    assertEquals("foo", rootTag.getAttributes()[0].getName());
}
/** setAttribute() writes a double-quoted attribute into the tag text. */
public void testSetAttribute() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html/>");
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            rootTag.setAttribute("foo", "bar");
        }
    });
    assertEquals(1, rootTag.getAttributes().length);
    assertEquals("bar", rootTag.getAttributeValue("foo"));
    assertEquals("foo", rootTag.getAttributes()[0].getName());
    assertEquals("<html foo=\"bar\"/>", rootTag.getText());
}
/** A value containing double quotes is emitted with single-quoted attribute syntax. */
public void testSetAttributeWithQuotes() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html/>");
    final String value = "a \"b\" c";
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            rootTag.setAttribute("foo", value);
        }
    });
    assertEquals(1, rootTag.getAttributes().length);
    assertEquals(value, rootTag.getAttributeValue("foo"));
    assertEquals("foo", rootTag.getAttributes()[0].getName());
    assertEquals("<html foo='" + value + "'/>", rootTag.getText());
}
/**
 * A value containing both quote kinds falls back to double-quoted syntax with
 * the inner double quotes escaped as &quot; entities.
 */
public void testSetAttributeWithQuotes2() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html/>");
    final String value = "'a \"b\" c'";
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            rootTag.setAttribute("foo", value);
        }
    });
    final String expected = value.replaceAll("\"", "&quot;");
    assertEquals(1, rootTag.getAttributes().length);
    assertEquals(expected, rootTag.getAttributeValue("foo"));
    // getDisplayValue() un-escapes the entities back to the original string.
    assertEquals(value, rootTag.getAttribute("foo").getDisplayValue());
    assertEquals("foo", rootTag.getAttributes()[0].getName());
    assertEquals("<html foo=\"" + expected + "\"/>", rootTag.getText());
}
/** updateText() with the attribute's own text is a no-op round trip. */
public void testSetAttributeUpdateText() throws Exception {
    final String value = "a \"b\" c";
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html foo='" + value + "'/>");
    assertEquals(1, rootTag.getAttributes().length);
    assertEquals(value, rootTag.getAttributeValue("foo"));
    final XmlAttribute foo = rootTag.getAttribute("foo");
    // The value element's text includes the surrounding quote characters.
    final String text = foo.getValueElement().getText();
    assertEquals("'" + value + "'", text);
    ((PsiLanguageInjectionHost)foo.getValueElement()).updateText(text);
    assertEquals("<html foo='" + value + "'/>", rootTag.getText());
}
/** setAttribute() with an empty namespace produces an unqualified attribute. */
public void testSetAttributeWithNamespaces() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<ns:tag xmlns:ns=\"xxx\"/>");
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            rootTag.setAttribute("foo", "", "bar");
        }
    });
    // Two attributes total: the xmlns:ns declaration plus the new foo.
    assertEquals(2, rootTag.getAttributes().length);
    assertEquals("bar", rootTag.getAttributeValue("foo"));
    assertEquals("foo", rootTag.getAttributes()[1].getName());
    assertEquals("<ns:tag xmlns:ns=\"xxx\" foo=\"bar\"/>", rootTag.getText());
}
/** Removing all characters of a text element leaves an empty tag body. */
public void testTextEdit1() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html>aaa</html>");
    final XmlText xmlText = rootTag.getValue().getTextElements()[0];
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            xmlText.removeText(0, 3);
        }
    });
    assertEquals("<html></html>", rootTag.getText());
}
/**
 * removeText() over a display range spanning the escaped entity
 * ("a&lt;a" displays as "a<a", 3 chars) removes the whole value.
 */
public void testTextEdit2() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html>a&lt;a</html>");
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            rootTag.getValue().getTextElements()[0].removeText(0, 3);
        }
    });
    assertEquals("<html></html>", rootTag.getText());
}
/** Removing the middle (escaped) character merges the remainder into one text child. */
public void testTextEdit3() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html>a&lt;a</html>");
    final XmlText xmlText = rootTag.getValue().getTextElements()[0];
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            xmlText.removeText(1, 2);
        }
    });
    assertEquals(1, xmlText.getChildren().length);
    assertEquals("<html>aa</html>", rootTag.getText());
}
/** Same middle-character removal as testTextEdit3 but on plain (unescaped) text. */
public void testTextEdit4() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html>aaa</html>");
    final XmlText xmlText = rootTag.getValue().getTextElements()[0];
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            xmlText.removeText(1, 2);
        }
    });
    assertEquals(1, xmlText.getChildren().length);
    assertEquals("<html>aa</html>", rootTag.getText());
}
/** A sub-tag's value text range stays consistent with the file text after reindenting. */
public void testTextEdit5() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html><x>xxx</x>\n<y>yyy</y></html>");
    final XmlTag y = rootTag.findFirstSubTag("y");
    final PsiFile file = y.getContainingFile();
    String text = y.getValue().getText();
    TextRange textRange = y.getValue().getTextRange();
    assertEquals(text, textRange.substring(file.getText()));
    // Reformat the line containing <y>, then re-check the range/text agreement.
    new WriteCommandAction(getProject(), file) {
        @Override
        protected void run(@NotNull final Result result) throws Throwable {
            CodeStyleManager.getInstance(getProject()).adjustLineIndent(file, y.getTextOffset());
        }
    }.execute();
    text = y.getValue().getText();
    textRange = y.getValue().getTextRange();
    assertEquals(text, textRange.substring(file.getText()));
}
/** Deleting a tag between two text nodes merges the surrounding text. */
public void testTextEdit6() throws Exception {
    final XmlTag rootTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<html>a<b>1</b>c</html>");
    final XmlTag xmlTag = rootTag.findFirstSubTag("b");
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            xmlTag.delete();
        }
    });
    assertEquals("<html>ac</html>", rootTag.getText());
}
/** setText("<") stores the character escaped, but getValue() returns the raw "<". */
public void testBrace() throws Exception {
    final XmlTag tagFromText = XmlElementFactory.getInstance(getProject()).createTagFromText("<a/>");
    WriteCommandAction.runWriteCommandAction(null, new Runnable(){
        @Override
        public void run() {
            tagFromText.getValue().setText("<");
        }
    });
    assertEquals("<", tagFromText.getValue().getTextElements()[0].getValue());
}
/** An empty tag's value range is the zero-length span between the open and close tags. */
public void testEmptyTextRange() throws Exception {
    final XmlTag rootTag = createTag("test.xhtml", "<a></a>");
    final TextRange valueRange = rootTag.getValue().getTextRange();
    // "<a>" is 3 characters long, so the empty value starts and ends at offset 3.
    assertEquals(3, valueRange.getStartOffset());
    assertEquals(3, valueRange.getEndOffset());
}
/** Removing a tag sandwiched between two text nodes must join them. */
public void testDeleteTagBetweenText() throws Exception {
    final XmlTag pTag = createTag("foo.xhtml", "<p>a<div/>b</p>");
    final XmlTag divTag = pTag.getSubTags()[0];
    new WriteCommandAction(getProject(), pTag.getContainingFile()) {
        @Override
        protected void run(@NotNull final Result result) throws Throwable {
            divTag.delete();
        }
    }.execute();
    assertEquals("<p>ab</p>", pTag.getText());
}
/** Parses {@code text} as a physical XML file named {@code name} and returns its root tag. */
private static XmlTag createTag(final String name, final String text) {
    final PsiFile psiFile = PsiFileFactory.getInstance(getProject())
        .createFileFromText(name, StdFileTypes.XML, text, LocalTimeCounter.currentTime(), true);
    return ((XmlFile)psiFile).getDocument().getRootTag();
}
// tests whether rangemarker gets changed when texts are merged, passes
/** A range marker over text preceding a deleted tag must survive the text merge unchanged. */
public void testRangeMarker1() throws IOException, IncorrectOperationException {
    final XmlFile file = (XmlFile)createFile("file.xhtml", "<a>1234<b></b>567</a>");
    final XmlTag root = file.getDocument().getRootTag();
    final XmlTag bTag = root.findFirstSubTag("b");
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
                @Override
                public void run() {
                    // The marker covers "234", the three characters before <b>.
                    final int tagOffset = bTag.getTextOffset();
                    final Document document = PsiDocumentManager.getInstance(getProject()).getDocument(root.getContainingFile());
                    final RangeMarker marker = document.createRangeMarker(tagOffset - 3, tagOffset);
                    bTag.delete();
                    assertEquals(4, marker.getStartOffset());
                    assertEquals(7, marker.getEndOffset());
                }
            }, "", null);
        }
    });
}
// this one fails, the difference is that we do some manipulations before: move "234" before the tag
// Moves "234" before the <b> tag, then deletes the tag and checks the marker.
public void testRangeMarker2() throws IOException, IncorrectOperationException {
    final XmlTag root = createTag("file.xhtml", "<a>1<b>234</b>567</a>");
    final XmlTag tag = root.findFirstSubTag("b");
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
                @Override
                public void run() {
                    Document document = PsiDocumentManager.getInstance(getProject()).getDocument(root.getContainingFile());
                    XmlTagChild child = tag.getValue().getChildren()[0];
                    assertTrue(child instanceof XmlText && child.getText().equals("234"));
                    try {
                        // Copy "234" in front of <b>, then delete the tag; the marker
                        // over the moved text (offsets 4-7) should stay put.
                        tag.getParent().addBefore(child, tag);
                        assertEquals(7, tag.getTextOffset());
                        RangeMarker marker = document.createRangeMarker(4, 7);
                        tag.delete();
                        assertEquals(4, marker.getStartOffset());
                        assertEquals(7, marker.getEndOffset());
                    }
                    catch (IncorrectOperationException e) {
                        // NOTE(review): the exception is swallowed, so any PSI failure
                        // skips the assertions and the test passes vacuously. The header
                        // comment says "this one fails" — confirm whether the swallow is
                        // deliberate before rethrowing here.
                    }
                }
            }, "", null);
        }
    });
}
// the previous test reveals one problem with text merge, "234" in fact is not merged with "1"
/** After moving "234" before <b> and deleting the tag, all text must merge into one node. */
public void test3() throws IOException, IncorrectOperationException {
    final XmlTag root = XmlElementFactory.getInstance(getProject()).createTagFromText("<a>1<b>234</b>456</a>");
    final XmlTag bTag = root.findFirstSubTag("b");
    final XmlTagChild textChild = bTag.getValue().getChildren()[0];
    assertTrue(textChild instanceof XmlText && textChild.getText().equals("234"));
    WriteCommandAction.runWriteCommandAction(null, new Runnable() {
        @Override
        public void run() {
            assertNotNull(bTag.getParent().addBefore(textChild, bTag));
            bTag.delete();
        }
    });
    // "1", the moved "234" and "456" must collapse into a single text node.
    assertEquals(1, root.getValue().getChildren().length);
    assertEquals("1234456", root.getValue().getChildren()[0].getText());
}
/** Same scenario as {@link #test3()} but on an XHTML tag. */
public void test3XHTML() throws IOException, IncorrectOperationException {
    final XmlTag root = XmlElementFactory.getInstance(getProject()).createXHTMLTagFromText("<a>1<b>234</b>456</a>");
    final XmlTag bTag = root.findFirstSubTag("b");
    final XmlTagChild textChild = bTag.getValue().getChildren()[0];
    assertTrue(textChild instanceof XmlText && textChild.getText().equals("234"));
    WriteCommandAction.runWriteCommandAction(null, new Runnable() {
        @Override
        public void run() {
            assertNotNull(bTag.getParent().addBefore(textChild, bTag));
            bTag.delete();
        }
    });
    assertEquals(1, root.getValue().getChildren().length);
    assertEquals("1234456", root.getValue().getChildren()[0].getText());
}
/** Display text added to a tag must come back unchanged from the value's text elements. */
public void testDisplayText() throws Throwable {
    final XmlTag fooTag = XmlElementFactory.getInstance(getProject()).createTagFromText(" <foo/>");
    WriteCommandAction.runWriteCommandAction(null, new Runnable() {
        @Override
        public void run() {
            fooTag.add(XmlElementFactory.getInstance(getProject()).createDisplayText("aaa\nbbb"));
        }
    });
    assertEquals("aaa\nbbb", fooTag.getValue().getTextElements()[0].getValue());
}
/** Copying a tag's text child in front of the tag must succeed in an XHTML file. */
public void testXHTMLAddBefore1() throws Exception {
    final XmlFile file = (XmlFile)PsiFileFactory.getInstance(getProject())
        .createFileFromText("test.xhtml", "<a>a <b>123</b> c</a>");
    final XmlTag bTag = file.getDocument().getRootTag().findFirstSubTag("b");
    final XmlTagChild[] valueChildren = bTag.getValue().getChildren();
    final PsiElement bParent = bTag.getParent();
    WriteCommandAction.runWriteCommandAction(null, new Runnable() {
        @Override
        public void run() {
            assertNotNull(bParent.addBefore(valueChildren[0], bTag));
        }
    });
}
/** Attributes appended via setAttribute keep insertion order; deleting the middle one preserves the rest. */
public void testXHTMLSetAttribute1() throws Exception {
    new WriteCommandAction.Simple(getProject()) {
        @Override
        protected void run() throws Throwable {
            final XmlFile file = (XmlFile)PsiFileFactory.getInstance(getProject()).createFileFromText("test.xhtml", "<a/>");
            final XmlTag rootTag = file.getDocument().getRootTag();
            rootTag.setAttribute("a", "");
            assertEquals("<a a=\"\"/>", rootTag.getText());
            rootTag.setAttribute("b", "");
            assertEquals("<a a=\"\" b=\"\"/>", rootTag.getText());
            rootTag.setAttribute("c", "");
            assertEquals("<a a=\"\" b=\"\" c=\"\"/>", rootTag.getText());
            // Delete the middle attribute ("b"); "a" and "c" must remain.
            rootTag.getAttributes()[1].delete();
            assertEquals("<a a=\"\" c=\"\"/>", rootTag.getText());
        }
    }.execute().throwException();
}
// Verifies that a non-breaking space in XHTML is surfaced as U+00A0 in the value.
// NOTE(review): the character inside "<a> </a>" must be a literal U+00A0 — or the
// source originally read "<a>&nbsp;</a>" and was mangled by an entity-unescaping
// pass — for the last assertion to hold. Confirm against version control.
// Also note the assertEquals arguments are in (actual, expected) order here,
// the reverse of JUnit convention.
public void testXHTMLNbsp1() throws Exception {
    final XmlFile file = (XmlFile)PsiFileFactory.getInstance(getProject()).createFileFromText("test.xhtml", "<a> </a>");
    final XmlTag tagB = file.getDocument().getRootTag();
    assertEquals(tagB.getValue().getTextElements().length, 1);
    assertEquals(tagB.getValue().getTextElements()[0].getValue(), "\u00a0");
}
/** Deleting an inner tag must also remove the whitespace it owned, keeping the rest. */
public void testDeleteTagWithMultilineWhitespace1() throws Exception {
    final XmlFile file = (XmlFile)PsiFileFactory.getInstance(getProject())
        .createFileFromText("test.xml", "<a>\n <a/>\n</a>");
    final XmlTag rootTag = file.getDocument().getRootTag();
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            rootTag.getSubTags()[0].delete();
        }
    });
    assertEquals("<a>\n </a>", rootTag.getText());
}
/** Same as the previous test, one nesting level deeper. */
public void testDeleteTagWithMultilineWhitespace2() throws Exception {
    final XmlFile file = (XmlFile)PsiFileFactory.getInstance(getProject())
        .createFileFromText("test.xml", "<a>\n <a>\n <b>\n hasgdgasjdgasdg asgdjhasgd</b>\n </a>\n</a>");
    final XmlTag rootTag = file.getDocument().getRootTag();
    WriteCommandAction.runWriteCommandAction(null, new Runnable() {
        @Override
        public void run() {
            rootTag.getSubTags()[0].getSubTags()[0].delete();
        }
    });
    assertEquals("<a>\n <a>\n </a>\n</a>", rootTag.getText());
}
/**
 * A range marker positioned after removed text must keep its offsets.
 * The IncorrectOperationException is no longer swallowed: if removeText fails,
 * the document is unchanged and the marker assertions below would pass
 * vacuously, hiding the failure — so rethrow instead.
 */
public void testXHTMLRangeMarkers2() throws Exception {
    XmlTag tag = createTag("file.xhtml", "<a>xyz</a>");
    PsiFile psiFile = tag.getContainingFile();
    Document document = PsiDocumentManager.getInstance(psiFile.getProject()).getDocument(psiFile);
    RangeMarker rangeMarker = document.createRangeMarker(5, 5);
    final XmlText text = (XmlText) tag.getValue().getChildren()[0];
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
                @Override
                public void run() {
                    try {
                        text.removeText(2, 3);
                    }
                    catch (IncorrectOperationException e) {
                        // Surface PSI failures instead of silently skipping the edit.
                        throw new RuntimeException(e);
                    }
                }
            }, "", null, UndoConfirmationPolicy.DO_NOT_REQUEST_CONFIRMATION);
        }
    });
    assertEquals(5, rangeMarker.getStartOffset());
    assertEquals(5, rangeMarker.getEndOffset());
}
/**
 * Inserting a raw '<' into XHTML text must be escaped as an entity in the
 * physical tag text. The expected string previously read "<a>x<yz</a>" —
 * an artifact of an entity-unescaping pass over this file (see the provably
 * garbled toDisplay/toEscapedText helpers); restored to "&lt;".
 */
public void testXHTMLTextInsert() throws Exception {
    final XmlTag tag = XmlElementFactory.getInstance(getProject()).createXHTMLTagFromText("<a>xyz</a>");
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            tag.getValue().getTextElements()[0].insertText("<", 1);
        }
    });
    assertEquals("<a>x&lt;yz</a>", tag.getText());
}
/** Inserts "a" at every position of texts with various leading/inner/trailing spaces. */
public void testSimpleTextInsertion() throws Throwable {
    for (String text : new String[]{"xyz", " xyz", "x yz", "xy z", "xyz ", " xyz ", " x y z "}) {
        doTestSimpleInsertion(text, "a");
    }
}
/** Inserting a lone space into plain text must behave like any other character. */
public void testWhitespaceTextInsertion() throws Throwable {
    doTestSimpleInsertion("xyz", " ");
}
/** Deletes every sub-range of texts with various whitespace placements. */
public void testSimpleTextDeletion() throws Throwable {
    for (String text : new String[]{"xyz", " xyz", "x yz", "xy z", "xyz ", " xyz ", " x y z "}) {
        doTestSimpleDeletion(text);
    }
}
/** Same as testSimpleTextDeletion but only over the whitespace-bearing fixtures. */
public void testWhitespaceDeletion() throws Throwable {
    for (String text : new String[]{" xyz", "x yz", "xy z", "xyz ", " xyz ", " x y z "}) {
        doTestSimpleDeletion(text);
    }
}
/**
 * For every [start, end) sub-range of {@code text}, removes that range from a
 * freshly created XHTML text node and checks the value against StringBuilder.delete.
 */
private static void doTestSimpleDeletion(final String text) throws IncorrectOperationException {
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            for (int start = 0; start < text.length(); start++) {
                for (int end = start; end < text.length(); end++) {
                    final XmlTag tag = XmlElementFactory.getInstance(getProject()).createXHTMLTagFromText("<a>" + text + "</a>");
                    final PsiElement[] children = tag.getValue().getTextElements();
                    assertEquals(1, children.length);
                    final XmlText xmlText = (XmlText)children[0];
                    assertEquals(text, xmlText.getText());
                    xmlText.removeText(start, end);
                    final StringBuilder expected = new StringBuilder(text).delete(start, end);
                    assertEquals(expected.toString(), xmlText.getValue());
                }
            }
        }
    });
}
/**
 * Inserts {@code textToInsert} at every offset of {@code text} in a fresh XHTML
 * text node and checks the value against StringBuilder.insert.
 */
private static void doTestSimpleInsertion(final String text, final String textToInsert) throws IncorrectOperationException {
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            for (int offset = 0; offset <= text.length(); offset++) {
                final XmlTag tag = XmlElementFactory.getInstance(getProject()).createXHTMLTagFromText("<a>" + text + "</a>");
                final PsiElement[] children = tag.getValue().getTextElements();
                assertEquals(1, children.length);
                final XmlText xmlText = (XmlText)children[0];
                assertEquals(text, xmlText.getValue());
                xmlText.insertText(textToInsert, offset);
                final StringBuilder expected = new StringBuilder(text).insert(offset, textToInsert);
                assertEquals(expected.toString(), xmlText.getValue());
            }
        }
    });
}
/**
 * Exercises insertion into display texts containing characters that require
 * escaping ('&', '<', '>'); '_' in the fixtures stands for a non-breaking space.
 */
public void testEscapedInsertion() throws Throwable {
    final String[][] cases = {
        {"xyz", "&"}, {"xyz", "&&"}, {"xyz", "&x&"},
        {"&xyz", "a"}, {"&xyz", " "},
        {"x&yz", "a"}, {"x&yz", " "},
        {"xy&z", "a"}, {"xy&z", " "},
        {"xyz&", "a"}, {"xyz&", " "},
        {"&x&y&z&", "a"},
        {" x&y&z&", "a"},
        {" x y&z&", "a"},
        {"&x y&z&", "a"},
        {"&x y&z ", "a"},
        {"&x&y&z&", "<"}, {"&x&y&z&", ">"},
        {"_xyz", "a"}, {"x_yz", "a"}, {"xy_z", "a"}, {"xyz_", "a"},
        {"_xyz_", "a"}, {"_x_y_z_", "a"},
    };
    for (String[] testCase : cases) {
        doTestEscapedInsertion(testCase[0], testCase[1]);
    }
}
/**
 * Deletion counterpart of testEscapedInsertion. Deliberately disabled via the
 * "notest" prefix — kept as-is so it can be re-enabled once deletion passes.
 */
public static void notestEscapedDeletion() {
    final String[] cases = {
        "&", "&&", " &&", " & &", " & & ", " && ", "&& ", "& ", " ",
        "&abc", "a&bc", "ab&c", "abc&",
        " &abc", "a &bc", "ab &c", "abc &",
        "& abc", "a& bc", "ab& c", "abc& ",
    };
    for (String text : cases) {
        doTestEscapedDeletion(text);
    }
}
/**
 * Inserts {@code textToInsert} at every display offset of {@code text}
 * (after escaping it) and checks the resulting display value.
 * '_' in the fixture stands for a non-breaking space.
 *
 * Fix: the sibling helpers (doTestSimpleInsertion, doTestSimpleDeletion,
 * doTestEscapedDeletion) all wrap their PSI mutations in a write action,
 * but this one did not — made consistent here.
 */
private static void doTestEscapedInsertion(final String text, final String textToInsert) throws IncorrectOperationException {
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            String tagText = toEscapedText(text);
            for (int i = 0; i <= text.length(); i++) {
                XmlTag tag = XmlElementFactory.getInstance(getProject()).createXHTMLTagFromText("<a>" + tagText + "</a>");
                final PsiElement[] children = tag.getValue().getTextElements();
                assertEquals(1, children.length);
                final XmlText xmlText = (XmlText)children[0];
                assertEquals(tagText, xmlText.getText());
                xmlText.insertText(textToInsert, i);
                final StringBuilder expectedDisplay = new StringBuilder(text.replace('_', '\u00a0'));
                expectedDisplay.insert(i, textToInsert);
                assertEquals(expectedDisplay.toString(), xmlText.getValue());
                // final String expectedText = toEscapedText(expectedDisplay.toString());
                // assertEquals(expectedText, xmlText.getText());
            }
        }
    });
}
// Deletion counterpart of doTestEscapedInsertion; only reachable from the
// disabled notestEscapedDeletion.
// NOTE(review): this helper maps '_' to a plain ' ' when computing the expected
// display value, whereas doTestEscapedInsertion maps '_' to '\u00a0'. Presumably
// the ' ' literal here was a non-breaking space originally (this file shows
// entity/character mangling elsewhere) — confirm before re-enabling the test.
private static void doTestEscapedDeletion(final String text) throws IncorrectOperationException {
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            String tagText = toEscapedText(text);
            // Remove every [i, j) display range and compare against StringBuilder.delete.
            for (int i = 0; i < text.length(); i++) {
                for (int j = i; j < text.length(); j++) {
                    XmlTag tag = XmlElementFactory.getInstance(getProject()).createXHTMLTagFromText("<a>" + tagText + "</a>");
                    final PsiElement[] children = tag.getValue().getTextElements();
                    assertEquals(1, children.length);
                    final XmlText xmlText = (XmlText)children[0];
                    assertEquals(tagText, xmlText.getText());
                    xmlText.removeText(i, j);
                    final StringBuilder expectedDisplay = new StringBuilder(text.replace('_', ' '));
                    expectedDisplay.delete(i, j);
                    assertEquals(expectedDisplay.toString(), xmlText.getValue());
                    final String expectedText = toEscapedText(expectedDisplay.toString());
                    assertEquals(expectedText, xmlText.getText());
                }
            }
        }
    });
}
/** Deleting the only subtag of an XHTML tag must leave the surrounding whitespace sane. */
public void testWhitespacesInEmptyXHTMLTag() throws Exception {
    final XmlTag aTag = XmlElementFactory.getInstance(getProject()).createXHTMLTagFromText("<a> <b/> </a>");
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            aTag.findFirstSubTag("b").delete();
        }
    });
    assertEquals("<a> </a>", aTag.getText());
}
/** Inserting into a text node that contains a space must not disturb the rest. */
public void test2() throws Exception {
    final XmlFile file = (XmlFile)createFile("file.xml", "<a>x y</a>");
    final XmlTag rootTag = file.getDocument().getRootTag();
    final XmlText xmlText = rootTag.getValue().getTextElements()[0];
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
                @Override
                public void run() {
                    try {
                        xmlText.insertText("z", 1);
                    }
                    catch (IncorrectOperationException e) {
                        throw new RuntimeException(e);
                    }
                }
            }, "", null, UndoConfirmationPolicy.DO_NOT_REQUEST_CONFIRMATION);
        }
    });
    assertEquals("<a>xz y</a>", rootTag.getText());
}
private static String toDisplay(String text) {
text = text.replaceAll("<", "<");
text = text.replaceAll(">", ">");
text = text.replaceAll("&", "&");
text = text.replaceAll("'", "'");
text = text.replaceAll(""", "\"");
text = text.replaceAll("<!\\[CDATA\\[", "");
text = text.replaceAll("\\]\\]>", "");
return text.replaceAll(" ", "_");
}
private static String toEscapedText(String original) {
String text = original.replaceAll("<", "<");
text = text.replaceAll(">", ">");
text = text.replaceAll("&", "&");
text = text.replaceAll("'", "'");
text = text.replaceAll("\"", """);
text = text.replaceAll("_", " ");
assertEquals(original, toDisplay(text));
return text;
}
// Checks display<->physical offset mapping over a spread of display texts
// (first batch) and already-escaped texts with CDATA sections (second batch).
// NOTE(review): the bare '&' characters in the escaped-text fixtures below
// (e.g. "xxx&<![CDATA[x]]>xxx") are almost certainly the remains of "&amp;"
// after the entity-unescaping mangle seen elsewhere in this file — a bare '&'
// is not valid escaped XML text. Confirm against version control before fixing.
public void testCoordinateMappingConsistent() throws Exception {
    doCoordinateMappingConsistentFromDisplayText("abc");
    doCoordinateMappingConsistentFromDisplayText(" abc");
    doCoordinateMappingConsistentFromDisplayText(" a bc");
    doCoordinateMappingConsistentFromDisplayText(" a b c");
    doCoordinateMappingConsistentFromDisplayText(" a b c ");
    doCoordinateMappingConsistentFromDisplayText(" ab c ");
    doCoordinateMappingConsistentFromDisplayText(" abc ");
    doCoordinateMappingConsistentFromDisplayText("abc ");
    doCoordinateMappingConsistentFromDisplayText(" ");
    doCoordinateMappingConsistentFromDisplayText("&");
    doCoordinateMappingConsistentFromDisplayText("&abc");
    doCoordinateMappingConsistentFromDisplayText(" &abc");
    doCoordinateMappingConsistentFromDisplayText(" a& bc");
    doCoordinateMappingConsistentFromDisplayText(" a &b c");
    doCoordinateMappingConsistentFromDisplayText(" a b& c ");
    doCoordinateMappingConsistentFromDisplayText(" ab c& ");
    doCoordinateMappingConsistentFromDisplayText(" ab&c ");
    doCoordinateMappingConsistentFromDisplayText("abc &");
    doCoordinateMappingConsistentFromDisplayText("abc&");
    doCoordinateMappingConsistentFromDisplayText("ab&c&");
    doCoordinateMappingConsistentFromDisplayText("ab&c");
    doCoordinateMappingConsistentFromDisplayText("a&b&c");
    doCoordinateMappingConsistentFromEscapedText("<![CDATA[ ]]>");
    doCoordinateMappingConsistentFromEscapedText("<![CDATA[x]]>");
    doCoordinateMappingConsistentFromEscapedText("xxx<![CDATA[x]]>");
    doCoordinateMappingConsistentFromEscapedText("xxx<![CDATA[x]]>xxx");
    doCoordinateMappingConsistentFromEscapedText("xxx&<![CDATA[x]]>xxx");
    doCoordinateMappingConsistentFromEscapedText("xxx&<![CDATA[x]]>&xxx");
    doCoordinateMappingConsistentFromEscapedText("xxx<![CDATA[x]]>&xxx");
    doCoordinateMappingConsistentFromEscapedText("xxx<![CDATA[xas]]>&xxx");
    doCoordinateMappingConsistentFromEscapedText("xxx<![CDATA[xa>s]]>&xxx");
    doCoordinateMappingConsistentFromEscapedText("xxx<![CDATA[x<a>s]]>&xxx");
}
// Copying a non-breaking-space text node in front of its tag must merge it with
// the preceding "1" text node.
// NOTE(review): the lone-space literals below ("<a>1<b> </b></a>", " ", "1 ")
// were presumably "&nbsp;" entities (or literal U+00A0 characters) before the
// entity-unescaping mangle seen elsewhere in this file — with a plain space the
// getText() assertions look wrong. Confirm against version control.
public void testNBSP() throws Exception {
    final XmlTag tagA = XmlElementFactory.getInstance(getProject()).createXHTMLTagFromText("<a>1<b> </b></a>");
    final XmlTag tagB = tagA.findFirstSubTag("b");
    final XmlTagChild nbsp = tagB.getValue().getChildren()[0];
    assertEquals(" ", nbsp.getText());
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            tagA.addBefore(nbsp.copy(), tagB);
        }
    });
    XmlTagChild nbsp1 = tagA.getValue().getChildren()[0];
    assertEquals("1 ", nbsp1.getText());
}
/** Escapes the given display text and delegates to the escaped-text consistency check. */
private static void doCoordinateMappingConsistentFromDisplayText(final String text) throws IncorrectOperationException {
    doCoordinateMappingConsistentFromEscapedText(toEscapedText(text));
}
/**
 * Builds a tag around the escaped {@code tagText} and verifies that
 * displayToPhysical/physicalToDisplay are mutually consistent at every offset,
 * including both end boundaries.
 */
private static void doCoordinateMappingConsistentFromEscapedText(final String tagText) throws IncorrectOperationException {
    final String displayText = toDisplay(tagText);
    final XmlTag tag = XmlElementFactory.getInstance(getProject()).createTagFromText("<a>" + tagText + "</a>");
    final PsiElement[] children = tag.getValue().getTextElements();
    assertEquals(1, children.length);
    final XmlText xmlText = (XmlText)children[0];
    assertEquals(tagText, xmlText.getText());
    // Round-trip every display offset through physical coordinates and back.
    for (int display = 0; display <= displayText.length(); display++) {
        final int physical = xmlText.displayToPhysical(display);
        assertEquals("Coords mapping failed for: '" + tagText + "' - " + physical, xmlText.physicalToDisplay(physical), display);
    }
    assertEquals("Coords mapping failed for: '" + tagText + "'", 0, xmlText.physicalToDisplay(0));
    assertEquals(tagText.length(), xmlText.displayToPhysical(displayText.length()));
    assertEquals(displayText.length(), xmlText.physicalToDisplay(tagText.length()));
}
/**
 * Setting tag text to punctuation soup must not throw.
 * (Method name typo "Charactes" is kept — renaming a test method changes
 * which tests run under pattern-based filters.)
 */
public void testStrangeCharactesInText() throws Throwable {
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            final XmlTag tag = XmlElementFactory.getInstance(getProject()).createTagFromText("<a/>");
            tag.getValue().setText("@#$%@$%$${${''}");
        }
    });
}
/**
 * Adding a tag to a freshly refreshed physical file must not break
 * PSI-to-document synchronization.
 *
 * Fixes: the FileOutputStream was never closed on a write failure (now
 * try-with-resources); the redundant createNewFile() call after
 * FileUtil.createTempFile was dropped; the deprecated File.toURL() was
 * replaced with toURI().toURL().
 */
public void testPsiToDocumentSynchronizationFailed() throws Throwable {
    final String text = "<wpd><methods> </methods></wpd>";
    final File tempFile = FileUtil.createTempFile("idea-test", ".xml");
    try (FileOutputStream fileOutputStream = new FileOutputStream(tempFile)) {
        fileOutputStream.write(text.getBytes());
        fileOutputStream.flush();
    }
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
                @Override
                @SuppressWarnings({"ConstantConditions"})
                public void run() {
                    VirtualFileManager.getInstance().syncRefresh();
                    XmlFile file;
                    try {
                        file = (XmlFile)getPsiManager().findFile(VfsUtil.findFileByURL(tempFile.toURI().toURL()));
                    }
                    catch (MalformedURLException e) {
                        throw new RuntimeException(e);
                    }
                    final XmlTag methodTag = file.getDocument().getRootTag().findFirstSubTag("methods");
                    try {
                        methodTag.add(XmlElementFactory.getInstance(getProject()).createTagFromText("<method/>"));
                    }
                    catch (IncorrectOperationException e) {
                        throw new RuntimeException(e);
                    }
                }
            }, "", null, UndoConfirmationPolicy.DO_NOT_REQUEST_CONFIRMATION);
        }
    });
}
/** Adding a subtag to a tag with text content must not trip the formatter. */
public void testXmlFormattingException() throws Throwable {
    final XmlTag fooTag = XmlElementFactory.getInstance(getProject()).createTagFromText("<foo>bar</foo>");
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            fooTag.add(XmlElementFactory.getInstance(getProject()).createTagFromText("<bar/>"));
        }
    });
}
/** Setting xmlns on the root must invalidate the cached namespace of subtags. */
public void testSetNamespace() throws Exception {
    final XmlFile xhtmlFile = (XmlFile)PsiFileFactory.getInstance(getProject())
        .createFileFromText("dummy.xml", "<html><body/></html>");
    final XmlTag rootTag = xhtmlFile.getDocument().getRootTag();
    rootTag.getSubTags()[0].getNamespace(); // fill the cache
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            rootTag.setAttribute("xmlns", "http://www.ru");
        }
    });
    assertEquals("http://www.ru", rootTag.getSubTags()[0].getNamespace());
}
/** Display text added after a tag must end up parented by that tag's parent. */
public void testInsert() throws Exception {
    final String html = "<html><head /><body><hr /></body>\n</html>";
    final XmlFile file = (XmlFile)PsiFileFactory.getInstance(getProject()).createFileFromText("xxx.xhtml", html);
    final XmlTag body = file.getDocument().getRootTag().findFirstSubTag("body");
    final XmlTag hr = body.getSubTags()[0];
    final XmlText text = XmlElementFactory.getInstance(getProject()).createDisplayText("p");
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            final PsiElement inserted = hr.getParentTag().addAfter(text, hr);
            assertEquals(inserted.getParent(), hr.getParentTag());
        }
    });
}
/** collapseIfEmpty: empty and text-only tags collapse; tags with subtags do not. */
public void testCollapse() throws IncorrectOperationException {
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            final XmlElementFactory factory = XmlElementFactory.getInstance(getProject());
            final XmlTag emptyTag = factory.createTagFromText("<foo></foo>");
            emptyTag.collapseIfEmpty();
            assertEquals("<foo/>", emptyTag.getText());
            final XmlTag textTag = factory.createTagFromText("<foo>abc</foo>");
            textTag.collapseIfEmpty();
            assertEquals("<foo/>", textTag.getText());
            // A tag with child tags must be left alone.
            final XmlTag parentTag = factory.createTagFromText("<foo><boo/></foo>");
            parentTag.collapseIfEmpty();
            assertEquals("<foo><boo/></foo>", parentTag.getText());
        }
    });
}
/** Renaming a tag must update both start and end tag in PSI and in the document. */
public void testSetName() {
    final XmlFile file = (XmlFile)PsiFileFactory.getInstance(getProject())
        .createFileFromText("dummy.xml", XmlFileType.INSTANCE, "<fooBarGoo>1</fooBarGoo>", 0, true);
    final XmlTag rootTag = file.getDocument().getRootTag();
    final Document document = file.getViewProvider().getDocument();
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
            rootTag.setName("xxx");
            assertEquals("<xxx>1</xxx>", rootTag.getText());
            assertEquals("<xxx>1</xxx>", document.getText());
        }
    });
}
}
| |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.plugin.addrbook.macosx;
import java.util.*;
import java.util.regex.*;
import net.java.sip.communicator.plugin.addrbook.*;
import net.java.sip.communicator.service.contactsource.*;
import net.java.sip.communicator.service.contactsource.ContactDetail.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.util.*;
/**
* Implements <tt>ContactQuery</tt> for the Address Book of Mac OS X.
*
* @author Lyubomir Marinov
*/
public class MacOSXAddrBookContactQuery
extends AbstractAddrBookContactQuery<MacOSXAddrBookContactSourceService>
{
/**
* The <tt>Logger</tt> used by the <tt>MacOSXAddrBookContactQuery</tt> class
* and its instances for logging output.
*/
private static final Logger logger
= Logger.getLogger(MacOSXAddrBookContactQuery.class);
/**
 * The properties of <tt>ABPerson</tt> which are to be queried by the
 * <tt>MacOSXAddrBookContactQuery</tt> instances.
 * <p>
 * NOTE: the position of each entry in this array defines the value of the
 * corresponding <tt>kABXxxProperty</tt> index constant declared below —
 * keep both in sync when adding or reordering entries.
 */
public static final long[] ABPERSON_PROPERTIES
= new long[]
{
kABAIMInstantProperty(),
kABEmailProperty(),
kABFirstNameProperty(),
kABFirstNamePhoneticProperty(),
kABICQInstantProperty(),
kABJabberInstantProperty(),
kABLastNameProperty(),
kABLastNamePhoneticProperty(),
kABMiddleNameProperty(),
kABMiddleNamePhoneticProperty(),
kABMSNInstantProperty(),
kABNicknameProperty(),
kABPhoneProperty(),
kABYahooInstantProperty(),
kABPersonFlags(),
kABOrganizationProperty(),
kABMaidenNameProperty(),
kABBirthdayProperty(),
kABJobTitleProperty(),
kABHomePageProperty(),
kABURLsProperty(),
kABCalendarURIsProperty(),
kABAddressProperty(),
kABOtherDatesProperty(),
kABRelatedNamesProperty(),
kABDepartmentProperty(),
kABNoteProperty(),
kABTitleProperty(),
kABSuffixProperty()
};
/**
* The index of the <tt>kABAIMInstantProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABAIMInstantProperty = 0;
/**
* The index of the <tt>kABEmailProperty</tt> <tt>ABPerson</tt> property in
* {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABEmailProperty = 1;
/**
* The index of the <tt>kABFirstNameProperty</tt> <tt>ABPerson</tt> property
* in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABFirstNameProperty = 2;
/**
* The index of the <tt>kABFirstNamePhoneticProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABFirstNamePhoneticProperty = 3;
/**
* The index of the <tt>kABICQInstantProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABICQInstantProperty = 4;
/**
* The index of the <tt>kABJabberInstantProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABJabberInstantProperty = 5;
/**
* The index of the <tt>kABLastNameProperty</tt> <tt>ABPerson</tt> property
* in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABLastNameProperty = 6;
/**
* The index of the <tt>kABLastNamePhoneticProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABLastNamePhoneticProperty = 7;
/**
* The index of the <tt>kABMiddleNameProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABMiddleNameProperty = 8;
/**
* The index of the <tt>kABMiddleNamePhoneticProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABMiddleNamePhoneticProperty = 9;
/**
* The index of the <tt>kABMSNInstantProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABMSNInstantProperty = 10;
/**
* The index of the <tt>kABNicknameProperty</tt> <tt>ABPerson</tt> property
* in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABNicknameProperty = 11;
/**
* The index of the <tt>kABOrganizationProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABOrganizationProperty = 15;
/**
* The index of the <tt>kABPersonFlags</tt> <tt>ABPerson</tt> property in
* {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABPersonFlags = 14;
/**
* The index of the <tt>kABPhoneProperty</tt> <tt>ABPerson</tt> property in
* {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABPhoneProperty = 12;
/**
* The flag which indicates that an <tt>ABRecord</tt> is to be displayed as
* a company.
*/
public static final long kABShowAsCompany = 1;
/**
* The mask which extracts the <tt>kABShowAsXXX</tt> flag from the
* <tt>personFlags</tt> of an <tt>ABPerson</tt>.
*/
public static final long kABShowAsMask = 7;
/**
* The index of the <tt>kABYahooInstantProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABYahooInstantProperty = 13;
/**
* The index of the <tt>kABMaidenNameProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABMaidenNameProperty = 16;
/**
* The index of the <tt>kABBirthdayProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABBirthdayProperty = 17;
/**
* The index of the <tt>kABJobTitleProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABJobTitleProperty = 18;
/**
* The index of the <tt>kABHomePageProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABHomePageProperty = 19;
/**
* The index of the <tt>kABURLsProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABURLsProperty = 20;
/**
* The index of the <tt>kABCalendarURIsProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABCalendarURIsProperty = 21;
/**
* The index of the <tt>kABAddressProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABAddressProperty = 22;
/**
* The index of the <tt>kABOtherDatesProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABOtherDatesProperty = 23;
/**
* The index of the <tt>kABRelatedNamesProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABRelatedNamesProperty = 24;
/**
* The index of the <tt>kABDepartmentProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABDepartmentProperty = 25;
/**
* The index of the <tt>kABNoteProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABNoteProperty = 26;
/**
* The index of the <tt>kABTitleProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABTitleProperty = 27;
/**
* The index of the <tt>kABSuffixProperty</tt> <tt>ABPerson</tt>
* property in {@link #ABPERSON_PROPERTIES}.
*/
public static final int kABSuffixProperty = 28;
/**
 * The regex which matches the superfluous parts of an <tt>ABMultiValue</tt>
 * label.
 */
private static final Pattern LABEL_PATTERN
= Pattern.compile(
"kAB|Email|Phone|Label|(\\p{Punct}*)",
Pattern.CASE_INSENSITIVE);
static
{
// Load the JNI bridge to the OS X Address Book; the native methods declared
// in this class cannot be invoked before this library is loaded.
System.loadLibrary("jmacosxaddrbook");
}
/**
* Initializes a new <tt>MacOSXAddrBookContactQuery</tt> which is to perform
* a specific <tt>query</tt> in the Address Book of Mac OS X on behalf of a
* specific <tt>MacOSXAddrBookContactSourceService</tt>.
*
* @param contactSource the <tt>MacOSXAddrBookContactSourceService</tt>
* which is to perform the new <tt>ContactQuery</tt> instance
* @param query the <tt>Pattern</tt> for which <tt>contactSource</tt> i.e.
* the Address Book of Mac OS X is being queried
*/
public MacOSXAddrBookContactQuery(
MacOSXAddrBookContactSourceService contactSource,
Pattern query)
{
// All query state is managed by the abstract base class.
super(contactSource, query);
}
/**
 * Gets the <tt>imageData</tt> of a specific <tt>ABPerson</tt> instance.
 *
 * @param person the pointer to the <tt>ABPerson</tt> instance to get the
 * <tt>imageData</tt> of
 * @return the <tt>imageData</tt> of the specified <tt>ABPerson</tt>
 * instance, or <tt>null</tt> if the person has no image
 */
public static native byte[] ABPerson_imageData(long person);
/**
 * Gets the values of a specific set of <tt>ABRecord</tt> properties for a
 * specific <tt>ABRecord</tt> instance.
 *
 * @param record the pointer to the <tt>ABRecord</tt> to get the property
 * values of
 * @param properties the set of <tt>ABRecord</tt> properties to get the
 * values of
 * @return the values of the specified set of <tt>ABRecord</tt> properties
 * for the specified <tt>ABRecord</tt> instance, in the same order as
 * <tt>properties</tt>
 */
public static native Object[] ABRecord_valuesForProperties(
    long record,
    long[] properties);
/**
 * Returns the unique id of a record.
 * @param record the record whose id is retrieved.
 * @return the record id.
 */
public static native String ABRecord_uniqueId(long record);
/**
 * Sets a property for the person with the supplied id.
 * @param id the person id
 * @param property the property to set.
 * @param subPropety the sub-property (label), if available.
 * @param value the value to set.
 * @return whether the value was successfully set.
 */
public static native boolean setProperty(
    String id, long property, String subPropety, Object value);
/**
 * Removes a property.
 * @param id the person id.
 * @param property the property.
 * @return whether the property was successfully removed.
 */
public static native boolean removeProperty(String id, long property);
/**
 * Removes a contact from the address book.
 *
 * @param id the person id.
 *
 * @return whether the contact was successfully removed.
 */
public static native boolean deleteContact(String id);
/**
 * Creates a new address book contact.
 *
 * @return The identifier of the created contact. null if failed.
 */
public static native String createContact();
/**
 * Gets the pointer of the given contact.
 *
 * @param id the person id.
 *
 * @return The pointer of the given contact. Null if failed.
 */
public static native long getContactPointer(String id);
/**
 * Initializes a new <tt>ContactDetail</tt> instance which is to represent
 * a specific contact address that is the value of a specific
 * <tt>ABPerson</tt> property and, optionally, has a specific label.
 *
 * @param property the index in {@link #ABPERSON_PROPERTIES} of the
 * <tt>ABPerson</tt> property to be represented by <tt>ContactDetail</tt>
 * @param contactAddress the contact address to be represented by the new
 * <tt>ContactDetail</tt> instance
 * @param label an optional label to be added to the set of labels, if any,
 * determined by <tt>property</tt>
 * @param additionalProperty an optional extra label whose matching
 * <tt>SubCategory</tt>, if any, is added alongside the property-derived one
 * @param id The id of the detail.
 *
 * @return a new <tt>ContactDetail</tt> instance which represents the
 * specified <tt>contactAddress</tt>
 */
private ContactDetail createContactDetail(
    int property,
    String contactAddress,
    Object label,
    String additionalProperty,
    String id)
{
    Category c;
    SubCategory sc = null;

    // Map the ABPerson property index to a Category and, where the property
    // alone determines it, a SubCategory. Several cases intentionally fall
    // through to share the same mapping.
    switch (property)
    {
    case kABEmailProperty:
        c = Category.Email;
        break;
    case kABPhoneProperty:
        c = Category.Phone;
        break;
    case kABAIMInstantProperty:
        sc = SubCategory.AIM;
        c = Category.InstantMessaging;
        break;
    case kABICQInstantProperty:
        sc = SubCategory.ICQ;
        c = Category.InstantMessaging;
        break;
    case kABJabberInstantProperty:
        sc = SubCategory.Jabber;
        c = Category.InstantMessaging;
        break;
    case kABMSNInstantProperty:
        sc = SubCategory.MSN;
        c = Category.InstantMessaging;
        break;
    case kABYahooInstantProperty:
        sc = SubCategory.Yahoo;
        c = Category.InstantMessaging;
        break;
    // Maiden name falls through: treated like a first name.
    case kABMaidenNameProperty:
    case kABFirstNameProperty:
        sc = SubCategory.Name;
        c = Category.Personal;
        break;
    case kABFirstNamePhoneticProperty:
        sc = SubCategory.Name;
        c = Category.Personal;
        break;
    case kABLastNameProperty:
        sc = SubCategory.LastName;
        c = Category.Personal;
        break;
    case kABLastNamePhoneticProperty:
        sc = SubCategory.LastName;
        c = Category.Personal;
        break;
    // Middle names (incl. phonetic) fall through to Nickname.
    case kABMiddleNameProperty:
    case kABMiddleNamePhoneticProperty:
    case kABNicknameProperty:
        sc = SubCategory.Nickname;
        c = Category.Personal;
        break;
    // NOTE(review): birthday and URLs fall through to HomePage — confirm
    // this mapping is intended rather than an accidental fall-through.
    case kABBirthdayProperty:
    case kABURLsProperty:
    case kABHomePageProperty:
        sc = SubCategory.HomePage;
        c = Category.Personal;
        break;
    // Miscellaneous personal details with no dedicated SubCategory.
    case kABOtherDatesProperty:
    case kABRelatedNamesProperty:
    case kABNoteProperty:
    case kABTitleProperty:
    case kABSuffixProperty:
        c = Category.Personal;
        break;
    case kABOrganizationProperty:
    case kABJobTitleProperty:
        sc = SubCategory.JobTitle;
        c = Category.Organization;
        break;
    case kABDepartmentProperty:
        c = Category.Organization;
        sc = SubCategory.Name;
        break;
    case kABAddressProperty:
        c = Category.Address;
        break;
    default:
        c = null;
        break;
    }
    // When the property itself did not determine a SubCategory, try to
    // derive one from the label (e.g. "Home"/"Work"/"Mobile").
    if (sc == null)
    {
        if (label == null)
            sc = null;
        else
        {
            sc = getSubCategoryFromLabel(label);
        }
    }
    SubCategory[] subCategories;
    SubCategory additionalSubCategory = null;
    if(additionalProperty != null)
        additionalSubCategory = getSubCategoryFromLabel(additionalProperty);
    if(additionalSubCategory != null)
        subCategories = new SubCategory[]
            { sc, additionalSubCategory };
    else
        subCategories = new SubCategory[]{ sc };
    return new MacOSXAddrBookContactDetail(
            property,
            contactAddress,
            c,
            subCategories,
            additionalProperty,
            id);
}
/**
 * Returns the <tt>SubCategory</tt> corresponding to the given label.
 *
 * @param label the label to match to a <tt>SubCategory</tt>
 * @return the <tt>SubCategory</tt> corresponding to the given label, or
 * <tt>null</tt> when the label is unknown or empty after normalization
 */
private SubCategory getSubCategoryFromLabel(Object label)
{
    // Strip the native-label boilerplate (kAB/Email/Phone/Label and
    // punctuation) before matching.
    String normalized
        = LABEL_PATTERN.matcher((String) label).replaceAll("").trim();

    if (normalized.isEmpty())
        return null;

    // Known labels and their SubCategory counterparts, matched
    // case-insensitively, position for position.
    String[] knownLabels
        = { "home", "work", "other", "mobile", "homepage", "street",
            "state", "ZIP", "country", "city", "InstantMessageUsername",
            "workfax", "fax" };
    SubCategory[] mappings
        = { SubCategory.Home, SubCategory.Work, SubCategory.Other,
            SubCategory.Mobile, SubCategory.HomePage, SubCategory.Street,
            SubCategory.State, SubCategory.PostalCode, SubCategory.Country,
            SubCategory.City, SubCategory.Nickname, SubCategory.Fax,
            SubCategory.Fax };

    for (int i = 0; i < knownLabels.length; i++)
    {
        if (knownLabels[i].equalsIgnoreCase(normalized))
            return mappings[i];
    }
    return null;
}
/**
 * Calls back to a specific <tt>PtrCallback</tt> for each <tt>ABPerson</tt>
 * found in the Address Book of Mac OS X which matches a specific
 * <tt>String</tt> query.
 *
 * @param query the <tt>String</tt> for which the Address Book of Mac OS X
 * is to be queried. <b>Warning</b>: Ignored at the time of this writing;
 * the Java side filters via {@link #onPerson(long)} instead.
 * @param callback the <tt>PtrCallback</tt> to be notified about the
 * matching <tt>ABPerson</tt>s
 */
private static native void foreachPerson(
    String query,
    PtrCallback callback);
/**
 * Gets the <tt>contactDetails</tt> to be set on a <tt>SourceContact</tt>
 * which is to represent an <tt>ABPerson</tt> specified by the values of its
 * {@link #ABPERSON_PROPERTIES}.
 *
 * @param values the values of the <tt>ABPERSON_PROPERTIES</tt> which
 * represent the <tt>ABPerson</tt> to get the <tt>contactDetails</tt> of
 * @param id The id of the detail.
 *
 * @return the <tt>contactDetails</tt> to be set on a <tt>SourceContact</tt>
 * which is to represent the <tt>ABPerson</tt> specified by <tt>values</tt>
 */
private List<ContactDetail> getContactDetails(Object[] values, String id)
{
    List<ContactDetail> details = new LinkedList<ContactDetail>();

    // The property index doubles as the index into values.
    for (int property = 0; property < ABPERSON_PROPERTIES.length; property++)
    {
        Object value = values[property];

        if (value instanceof String)
        {
            String text = (String) value;

            if (text.length() == 0)
                continue;

            // Phone numbers are normalized before being exposed.
            if (property == kABPhoneProperty)
                text
                    = AddrBookActivator.getPhoneNumberI18nService()
                        .normalize(text);

            details.add(
                setCapabilities(
                    createContactDetail(property, text, null, null, id),
                    property));
        }
        else if (value instanceof Object[])
        {
            // Multi-value properties carry their own (value, label) pairs.
            parseMultiDetails(details, (Object[]) value, property, null, id);
        }
    }
    return details;
}
/**
 * Parses multi-value property data into contact details.
 *
 * @param contactDetails the result list to append to
 * @param multiValue the (value, label) pairs to parse; may be null
 * @param property the index of the property being parsed
 * @param label an outer label inherited from a parent multi-value entry
 * @param id The id of the detail.
 */
private void parseMultiDetails(
        List<ContactDetail> contactDetails,
        Object[] multiValue,
        int property,
        String label,
        String id)
{
    if (multiValue == null)
        return;

    // Entries come in pairs: even index = value, odd index = its label.
    for (int i = 0; i < multiValue.length; i += 2)
    {
        Object entry = multiValue[i];

        if (entry instanceof String)
        {
            String text = (String) entry;

            if (text.length() == 0)
                continue;

            // Phone numbers are normalized before being exposed.
            if (property == kABPhoneProperty)
            {
                text
                    = AddrBookActivator.getPhoneNumberI18nService()
                        .normalize(text);
            }

            contactDetails.add(
                setCapabilities(
                    createContactDetail(
                        property,
                        text,
                        multiValue[i + 1],
                        label,
                        id),
                    property));
        }
        else if (entry instanceof Object[])
        {
            // Nested multi-value: recurse, passing this entry's label down.
            Object entryLabel = multiValue[i + 1];

            parseMultiDetails(
                contactDetails,
                (Object[]) entry,
                property,
                (entryLabel instanceof String) ? (String) entryLabel : null,
                id);
        }
    }
}
/**
 * Gets the <tt>displayName</tt> to be set on a <tt>SourceContact</tt>
 * which is to represent an <tt>ABPerson</tt> specified by the values of its
 * {@link #ABPERSON_PROPERTIES}. The precedence is: organization name (for
 * "show as company" records), nickname, first+last name (with phonetic and
 * middle-name fallbacks), and finally the first non-empty String value of
 * any property.
 *
 * @param values the values of the <tt>ABPERSON_PROPERTIES</tt> which
 * represent the <tt>ABPerson</tt> to get the <tt>displayName</tt> of
 * @return the <tt>displayName</tt> to be set on a <tt>SourceContact</tt>
 * which is to represent the <tt>ABPerson</tt> specified by <tt>values</tt>;
 * the empty string when nothing usable is found
 */
private static String getDisplayName(Object[] values)
{
    long personFlags
        = (values[kABPersonFlags] instanceof Long)
            ? ((Long) values[kABPersonFlags]).longValue()
            : 0;
    String displayName;

    // Records flagged "show as company" prefer the organization name.
    if ((personFlags & kABShowAsMask) == kABShowAsCompany)
    {
        displayName
            = (values[kABOrganizationProperty] instanceof String)
                ? (String) values[kABOrganizationProperty]
                : "";
        if (displayName.length() != 0)
            return displayName;
    }

    // Nickname takes precedence over first/last name.
    displayName
        = (values[kABNicknameProperty] instanceof String)
            ? (String) values[kABNicknameProperty]
            : "";
    if (displayName.length() != 0)
        return displayName;

    // First name, falling back to its phonetic variant.
    String firstName
        = (values[kABFirstNameProperty] instanceof String)
            ? (String) values[kABFirstNameProperty]
            : "";
    if ((firstName.length() == 0)
            && (values[kABFirstNamePhoneticProperty] instanceof String))
    {
        firstName = (String) values[kABFirstNamePhoneticProperty];
    }

    // Last name, falling back to phonetic, then middle-name variants.
    String lastName
        = (values[kABLastNameProperty] instanceof String)
            ? (String) values[kABLastNameProperty]
            : "";
    if ((lastName.length() == 0)
            && (values[kABLastNamePhoneticProperty] instanceof String))
        lastName = (String) values[kABLastNamePhoneticProperty];
    if ((lastName.length() == 0)
            && (values[kABMiddleNameProperty] instanceof String))
        lastName = (String) values[kABMiddleNameProperty];
    if ((lastName.length() == 0)
            && (values[kABMiddleNamePhoneticProperty] instanceof String))
        lastName = (String) values[kABMiddleNamePhoneticProperty];

    if (firstName.length() == 0)
        displayName = lastName;
    else
    {
        displayName
            = (lastName.length() == 0)
                ? firstName
                : (firstName + " " + lastName);
    }
    if (displayName.length() != 0)
        return displayName;

    // Last resort: the first non-empty String value of any property,
    // including multi-value properties.
    for (int i = 0; i < ABPERSON_PROPERTIES.length; i++)
    {
        Object value = values[i];

        if (value instanceof String)
        {
            String stringValue = (String) value;

            if (stringValue.length() != 0)
            {
                displayName = stringValue;
                break;
            }
        }
        else if (value instanceof Object[])
        {
            Object[] multiValue = (Object[]) value;

            for (int multiValueIndex = 0;
                    multiValueIndex < multiValue.length;
                    multiValueIndex += 2)
            {
                Object subValue = multiValue[multiValueIndex];

                if (subValue instanceof String)
                {
                    String stringSubValue = (String) subValue;

                    if (stringSubValue.length() != 0)
                    {
                        displayName = stringSubValue;
                        break;
                    }
                }
            }
            // Bug fix: the break above only exits the multi-value loop.
            // Without this outer break, a later property's String value
            // would overwrite the display name already found here.
            if (displayName.length() != 0)
                break;
        }
    }
    return displayName;
}
/**
 * Gets the organization name to be set on a <tt>SourceContact</tt>.
 *
 * @param values the values of the <tt>ABPERSON_PROPERTIES</tt> which
 * represent the <tt>ABPerson</tt> to get the organization name of.
 *
 * @return The organization name to be set on a <tt>SourceContact</tt>;
 * the empty string for "show as company" records (whose organization name
 * is already used as the display name) or when none is recorded.
 */
private static String getOrganization(Object[] values)
{
    Object flags = values[kABPersonFlags];
    long personFlags
        = (flags instanceof Long) ? ((Long) flags).longValue() : 0;

    // For company records the organization is the display name already.
    if ((personFlags & kABShowAsMask) == kABShowAsCompany)
        return "";

    Object organization = values[kABOrganizationProperty];
    return (organization instanceof String) ? (String) organization : "";
}
/**
 * Gets the value of the <tt>kABAIMInstantProperty</tt> constant.
 *
 * @return the value of the <tt>kABAIMInstantProperty</tt> constant
 */
public static native long kABAIMInstantProperty();
/**
 * Gets the value of the <tt>kABEmailProperty</tt> constant.
 *
 * @return the value of the <tt>kABEmailProperty</tt> constant
 */
public static native long kABEmailProperty();
/**
 * Gets the value of the <tt>kABFirstNameProperty</tt> constant.
 *
 * @return the value of the <tt>kABFirstNameProperty</tt> constant
 */
public static native long kABFirstNameProperty();
/**
 * Gets the value of the <tt>kABFirstNamePhoneticProperty</tt> constant.
 *
 * @return the value of the <tt>kABFirstNamePhoneticProperty</tt> constant
 */
public static native long kABFirstNamePhoneticProperty();
/**
 * Gets the value of the <tt>kABICQInstantProperty</tt> constant.
 *
 * @return the value of the <tt>kABICQInstantProperty</tt> constant
 */
public static native long kABICQInstantProperty();
/**
 * Gets the value of the <tt>kABJabberInstantProperty</tt> constant.
 *
 * @return the value of the <tt>kABJabberInstantProperty</tt> constant
 */
public static native long kABJabberInstantProperty();
/**
 * Gets the value of the <tt>kABLastNameProperty</tt> constant.
 *
 * @return the value of the <tt>kABLastNameProperty</tt> constant
 */
public static native long kABLastNameProperty();
/**
 * Gets the value of the <tt>kABLastNamePhoneticProperty</tt> constant.
 *
 * @return the value of the <tt>kABLastNamePhoneticProperty</tt> constant
 */
public static native long kABLastNamePhoneticProperty();
/**
 * Gets the value of the <tt>kABMiddleNameProperty</tt> constant.
 *
 * @return the value of the <tt>kABMiddleNameProperty</tt> constant
 */
public static native long kABMiddleNameProperty();
/**
 * Gets the value of the <tt>kABMiddleNamePhoneticProperty</tt> constant.
 *
 * @return the value of the <tt>kABMiddleNamePhoneticProperty</tt> constant
 */
public static native long kABMiddleNamePhoneticProperty();
/**
 * Gets the value of the <tt>kABMSNInstantProperty</tt> constant.
 *
 * @return the value of the <tt>kABMSNInstantProperty</tt> constant
 */
public static native long kABMSNInstantProperty();
/**
 * Gets the value of the <tt>kABNicknameProperty</tt> constant.
 *
 * @return the value of the <tt>kABNicknameProperty</tt> constant
 */
public static native long kABNicknameProperty();
/**
 * Gets the value of the <tt>kABOrganizationProperty</tt> constant.
 *
 * @return the value of the <tt>kABOrganizationProperty</tt> constant
 */
public static native long kABOrganizationProperty();
/**
 * Gets the value of the <tt>kABPersonFlags</tt> constant.
 *
 * @return the value of the <tt>kABPersonFlags</tt> constant
 */
public static native long kABPersonFlags();
/**
 * Gets the value of the <tt>kABPhoneProperty</tt> constant.
 *
 * @return the value of the <tt>kABPhoneProperty</tt> constant
 */
public static native long kABPhoneProperty();
/**
 * Gets the value of the <tt>kABYahooInstantProperty</tt> constant.
 *
 * @return the value of the <tt>kABYahooInstantProperty</tt> constant
 */
public static native long kABYahooInstantProperty();
/**
 * Gets the value of the <tt>kABMaidenNameProperty</tt> constant.
 *
 * @return the value of the <tt>kABMaidenNameProperty</tt> constant
 */
public static native long kABMaidenNameProperty();
/**
 * Gets the value of the <tt>kABBirthdayProperty</tt> constant.
 *
 * @return the value of the <tt>kABBirthdayProperty</tt> constant
 */
public static native long kABBirthdayProperty();
/**
 * Gets the value of the <tt>kABJobTitleProperty</tt> constant.
 *
 * @return the value of the <tt>kABJobTitleProperty</tt> constant
 */
public static native long kABJobTitleProperty();
/**
 * Gets the value of the <tt>kABHomePageProperty</tt> constant.
 *
 * @return the value of the <tt>kABHomePageProperty</tt> constant
 */
public static native long kABHomePageProperty();
/**
 * Gets the value of the <tt>kABURLsProperty</tt> constant.
 *
 * @return the value of the <tt>kABURLsProperty</tt> constant
 */
public static native long kABURLsProperty();
/**
 * Gets the value of the <tt>kABCalendarURIsProperty</tt> constant.
 *
 * @return the value of the <tt>kABCalendarURIsProperty</tt> constant
 */
public static native long kABCalendarURIsProperty();
/**
 * Gets the value of the <tt>kABAddressProperty</tt> constant.
 *
 * @return the value of the <tt>kABAddressProperty</tt> constant
 */
public static native long kABAddressProperty();
/**
 * Gets the value of the <tt>kABOtherDatesProperty</tt> constant.
 *
 * @return the value of the <tt>kABOtherDatesProperty</tt> constant
 */
public static native long kABOtherDatesProperty();
/**
 * Gets the value of the <tt>kABRelatedNamesProperty</tt> constant.
 *
 * @return the value of the <tt>kABRelatedNamesProperty</tt> constant
 */
public static native long kABRelatedNamesProperty();
/**
 * Gets the value of the <tt>kABDepartmentProperty</tt> constant.
 *
 * @return the value of the <tt>kABDepartmentProperty</tt> constant
 */
public static native long kABDepartmentProperty();
/**
 * Gets the value of the <tt>kABInstantMessageProperty</tt> constant.
 *
 * @return the value of the <tt>kABInstantMessageProperty</tt> constant
 */
public static native long kABInstantMessageProperty();
/**
 * Gets the value of the <tt>kABNoteProperty</tt> constant.
 *
 * @return the value of the <tt>kABNoteProperty</tt> constant
 */
public static native long kABNoteProperty();
/**
 * Gets the value of the <tt>kABTitleProperty</tt> constant.
 *
 * @return the value of the <tt>kABTitleProperty</tt> constant
 */
public static native long kABTitleProperty();
/**
 * Gets the value of the <tt>kABSuffixProperty</tt> constant.
 *
 * @return the value of the <tt>kABSuffixProperty</tt> constant
 */
public static native long kABSuffixProperty();

// Native accessors for the Address Book label and address-key string
// constants; each simply returns the corresponding native constant.
/** @return the native <tt>kABEmailWorkLabel</tt> string constant */
public static native String kABEmailWorkLabel();
/** @return the native <tt>kABEmailHomeLabel</tt> string constant */
public static native String kABEmailHomeLabel();
/** @return the native <tt>kABAddressHomeLabel</tt> string constant */
public static native String kABAddressHomeLabel();
/** @return the native <tt>kABAddressWorkLabel</tt> string constant */
public static native String kABAddressWorkLabel();
/** @return the native <tt>kABPhoneWorkLabel</tt> string constant */
public static native String kABPhoneWorkLabel();
/** @return the native <tt>kABPhoneHomeLabel</tt> string constant */
public static native String kABPhoneHomeLabel();
/** @return the native <tt>kABPhoneMobileLabel</tt> string constant */
public static native String kABPhoneMobileLabel();
/** @return the native <tt>kABPhoneMainLabel</tt> string constant */
public static native String kABPhoneMainLabel();
/** @return the native <tt>kABPhoneWorkFAXLabel</tt> string constant */
public static native String kABPhoneWorkFAXLabel();
/** @return the native <tt>kABHomeLabel</tt> string constant */
public static native String kABHomeLabel();
/** @return the native <tt>kABWorkLabel</tt> string constant */
public static native String kABWorkLabel();
/** @return the native <tt>kABOtherLabel</tt> string constant */
public static native String kABOtherLabel();
/** @return the native <tt>kABAddressStreetKey</tt> string constant */
public static native String kABAddressStreetKey();
/** @return the native <tt>kABAddressCityKey</tt> string constant */
public static native String kABAddressCityKey();
/** @return the native <tt>kABAddressStateKey</tt> string constant */
public static native String kABAddressStateKey();
/** @return the native <tt>kABAddressZIPKey</tt> string constant */
public static native String kABAddressZIPKey();
/** @return the native <tt>kABAddressCountryKey</tt> string constant */
public static native String kABAddressCountryKey();
/**
 * Determines whether a specific <tt>ABPerson</tt> property with a specific
 * <tt>value</tt> matches the {@link #query} of this
 * <tt>AsyncContactQuery</tt>.
 *
 * @param property the <tt>ABPerson</tt> property to check
 * @param value the value of the <tt>property</tt> to check
 * @return <tt>true</tt> if the specified <tt>value</tt> of the specified
 * <tt>property</tt> matches the <tt>query</tt> of this
 * <tt>AsyncContactQuery</tt>; otherwise, <tt>false</tt>
 */
private boolean matches(int property, String value)
{
    if (query.matcher(value).find())
        return true;

    // Phone numbers additionally match via number-aware comparison.
    return (property == kABPhoneProperty) && phoneNumberMatches(value);
}
/**
 * Determines whether an <tt>ABPerson</tt> represented by the values of its
 * {@link #ABPERSON_PROPERTIES} matches {@link #query}.
 *
 * @param values the values of the <tt>ABPERSON_PROPERTIES</tt> which
 * represent the <tt>ABPerson</tt> to be determined whether it matches
 * <tt>query</tt>
 * @return <tt>true</tt> if the <tt>ABPerson</tt> represented by the
 * specified <tt>values</tt> matches <tt>query</tt>; otherwise,
 * <tt>false</tt>
 */
private boolean matches(Object[] values)
{
    // The position of a value doubles as its property index.
    for (int property = 0; property < values.length; property++)
    {
        Object value = values[property];

        if (value instanceof String)
        {
            if (matches(property, (String) value))
                return true;
        }
        else if (value instanceof Object[])
        {
            // Multi-value properties store (value, label) pairs; only the
            // values (even indices) are matched.
            Object[] multiValue = (Object[]) value;

            for (int i = 0; i < multiValue.length; i += 2)
            {
                Object subValue = multiValue[i];

                if ((subValue instanceof String)
                        && matches(property, (String) subValue))
                    return true;
            }
        }
    }
    return false;
}
/**
 * Notifies this <tt>MacOSXAddrBookContactQuery</tt> about a specific
 * <tt>ABPerson</tt>.
 *
 * @param person a pointer to the <tt>ABPerson</tt> instance to notify about
 * @return <tt>true</tt> if this <tt>MacOSXAddrBookContactQuery</tt> is to
 * continue being called; otherwise, <tt>false</tt>
 */
private boolean onPerson(long person)
{
    Object[] values
        = ABRecord_valuesForProperties(person, ABPERSON_PROPERTIES);
    final String id = ABRecord_uniqueId(person);
    String displayName = getDisplayName(values);
    // Report only persons with a non-empty display name which match the
    // query either by display name or by any of their property values.
    if ((displayName.length() != 0)
        && (query.matcher(displayName).find() || matches(values)))
    {
        List<ContactDetail> contactDetails = getContactDetails(values, id);
        if (!contactDetails.isEmpty())
        {
            final MacOSXAddrBookSourceContact sourceContact
                = new MacOSXAddrBookSourceContact(
                        getContactSource(),
                        displayName,
                        contactDetails);
            sourceContact.setData(SourceContact.DATA_ID, id);
            sourceContact.setDisplayDetails(getOrganization(values));
            try
            {
                byte[] image = ABPerson_imageData(person);
                if (image != null)
                    sourceContact.setImage(image);
            }
            catch (OutOfMemoryError oome)
            {
                // Deliberately ignored: the contact image is not vital.
            }
            addQueryResult(sourceContact);
        }
    }
    // Keep iterating only while this query is still in progress.
    return (getStatus() == QUERY_IN_PROGRESS);
}
/**
 * Performs this <tt>AsyncContactQuery</tt> in a background <tt>Thread</tt>
 * by iterating the native Address Book and filtering via
 * {@link #onPerson(long)}.
 *
 * @see AsyncContactQuery#run()
 */
@Override
protected void run()
{
    // The native side ignores the query string; onPerson does the
    // actual matching.
    foreachPerson(
        query.toString(),
        new PtrCallback()
        {
            @Override
            public boolean callback(long person)
            {
                return onPerson(person);
            }
        });
}
/**
 * Sets the capabilities of a specific <tt>ContactDetail</tt> (e.g.
 * <tt>supportedOpSets</tt>) depending on the <tt>ABPerson</tt> property
 * that it stands for.
 *
 * @param contactDetail the <tt>ContactDetail</tt> to set the capabilities
 * of
 * @param property the index in {@link #ABPERSON_PROPERTIES} of the
 * <tt>ABPerson</tt> property represented by <tt>ContactDetail</tt>
 * @return <tt>contactDetail</tt>
 */
private ContactDetail setCapabilities(
    ContactDetail contactDetail,
    int property)
{
    List<Class<? extends OperationSet>> supportedOpSets
        = new LinkedList<Class<? extends OperationSet>>();
    Map<Class<? extends OperationSet>, String> preferredProtocols
        = new HashMap<Class<? extends OperationSet>, String>();
    // Every detail can be added as a contact.
    supportedOpSets.add(OperationSetPersistentPresence.class);
    switch (property)
    {
    case kABAIMInstantProperty:
        supportedOpSets.add(OperationSetBasicInstantMessaging.class);
        preferredProtocols.put(
                OperationSetBasicInstantMessaging.class,
                ProtocolNames.AIM);
        break;
    // NOTE(review): e-mail addresses are given telephony capability
    // (presumably because SIP URIs look like e-mail addresses) — confirm
    // this is intended rather than a copy/paste slip.
    case kABEmailProperty:
        supportedOpSets.add(OperationSetBasicTelephony.class);
        break;
    case kABICQInstantProperty:
        supportedOpSets.add(OperationSetBasicInstantMessaging.class);
        preferredProtocols.put(
                OperationSetBasicInstantMessaging.class,
                ProtocolNames.ICQ);
        break;
    // Jabber addresses support both IM and telephony.
    case kABJabberInstantProperty:
        supportedOpSets.add(OperationSetBasicInstantMessaging.class);
        preferredProtocols.put(
                OperationSetBasicInstantMessaging.class,
                ProtocolNames.JABBER);
        supportedOpSets.add(OperationSetBasicTelephony.class);
        preferredProtocols.put(
                OperationSetBasicTelephony.class,
                ProtocolNames.JABBER);
        break;
    case kABPhoneProperty:
        supportedOpSets.add(OperationSetBasicTelephony.class);
        break;
    case kABMSNInstantProperty:
        supportedOpSets.add(OperationSetBasicInstantMessaging.class);
        preferredProtocols.put(
                OperationSetBasicInstantMessaging.class,
                ProtocolNames.MSN);
        break;
    case kABYahooInstantProperty:
        supportedOpSets.add(OperationSetBasicInstantMessaging.class);
        preferredProtocols.put(
                OperationSetBasicInstantMessaging.class,
                ProtocolNames.YAHOO);
        break;
    default:
        break;
    }
    contactDetail.setSupportedOpSets(supportedOpSets);
    if (!preferredProtocols.isEmpty())
        contactDetail.setPreferredProtocols(preferredProtocols);
    return contactDetail;
}
/**
 * Callback method when receiving notifications for inserted items.
 *
 * @param person a pointer to the newly inserted <tt>ABPerson</tt>;
 * processed exactly like a query hit via {@link #onPerson(long)}
 */
public void inserted(long person)
{
    onPerson(person);
}
/**
 * Callback method when receiving notifications for updated items:
 * refreshes the display name, display details and contact details of the
 * already-known source contact, if any, and fires a change event.
 *
 * @param person a pointer to the updated <tt>ABPerson</tt>
 */
public void updated(long person)
{
    final String id = ABRecord_uniqueId(person);
    SourceContact sourceContact = findSourceContactByID(id);

    // instanceof is null-safe, so this also covers the unknown-id case.
    if (!(sourceContact instanceof MacOSXAddrBookSourceContact))
        return;

    Object[] values
        = ABRecord_valuesForProperties(person, ABPERSON_PROPERTIES);
    MacOSXAddrBookSourceContact editableSourceContact
        = (MacOSXAddrBookSourceContact) sourceContact;

    editableSourceContact.setDisplayName(getDisplayName(values));
    editableSourceContact.setDisplayDetails(getOrganization(values));
    editableSourceContact.setDetails(getContactDetails(values, id));

    fireContactChanged(sourceContact);
}
/**
 * Callback method when receiving notifications for deleted items.
 *
 * @param id the unique id of the deleted person; ignored when no matching
 * source contact is known
 */
public void deleted(String id)
{
    SourceContact sourceContact = findSourceContactByID(id);
    if(sourceContact != null)
        fireContactRemoved(sourceContact);
}
/**
 * Finds the <tt>ABPerson</tt> property index corresponding to the given
 * category and sub-categories.
 *
 * @param category the <tt>Category</tt> of the detail
 * @param subCategories the <tt>SubCategory</tt>-ies of the detail,
 * consulted where a category maps to more than one property
 * @return the matching index in {@link #ABPERSON_PROPERTIES}, or -1 when
 * no property corresponds to the given category/sub-categories
 */
public static int getProperty(
    Category category,
    Collection<SubCategory> subCategories)
{
    switch(category)
    {
    case Personal:
        if(subCategories.contains(SubCategory.Name))
            return kABFirstNameProperty;
        else if(subCategories.contains(SubCategory.LastName))
            return kABLastNameProperty;
        else if(subCategories.contains(SubCategory.Nickname))
            return kABNicknameProperty;
        else if(subCategories.contains(SubCategory.HomePage))
            return kABHomePageProperty;
        break;
    case Organization:
        // Anything that is not a job title is treated as a department.
        if(subCategories.contains(SubCategory.JobTitle))
            return kABJobTitleProperty;
        else
            return kABDepartmentProperty;
    case Email:
        return kABEmailProperty;
    case InstantMessaging:
        if(subCategories.contains(SubCategory.AIM))
            return kABAIMInstantProperty;
        else if(subCategories.contains(SubCategory.ICQ))
            return kABICQInstantProperty;
        else if(subCategories.contains(SubCategory.MSN))
            return kABMSNInstantProperty;
        else if(subCategories.contains(SubCategory.Jabber))
            return kABJabberInstantProperty;
        else if(subCategories.contains(SubCategory.Yahoo))
            return kABYahooInstantProperty;
        break;
    case Phone:
        return kABPhoneProperty;
    case Address:
        return kABAddressProperty;
    default: return -1;
    }
    return -1;
}
/**
 * Finds the native Address Book label/key corresponding to the given
 * property and sub-category.
 *
 * @param property the index in {@link #ABPERSON_PROPERTIES} of the
 * property the label belongs to
 * @param subCategory the sub-category to translate to a native label
 * @param subProperty the raw sub-property, returned as-is for the instant
 * messaging properties
 * @return the native label/key <tt>String</tt>, or null when none applies
 */
public static String getLabel(
    int property,
    SubCategory subCategory,
    String subProperty)
{
    switch(property)
    {
    case kABEmailProperty:
        if(subCategory == SubCategory.Home)
            return kABEmailHomeLabel();
        if(subCategory == SubCategory.Work)
            return kABEmailWorkLabel();
        break;
    // IM properties keep their raw sub-property as label.
    case kABICQInstantProperty:
    case kABAIMInstantProperty:
    case kABYahooInstantProperty:
    case kABMSNInstantProperty:
    case kABJabberInstantProperty:
        return subProperty;
    case kABPhoneProperty:
        if(subCategory == SubCategory.Home)
            return kABPhoneHomeLabel();
        if(subCategory == SubCategory.Work)
            return kABPhoneWorkLabel();
        if(subCategory == SubCategory.Fax)
            return kABPhoneWorkFAXLabel();
        if(subCategory == SubCategory.Mobile)
            return kABPhoneMobileLabel();
        if(subCategory == SubCategory.Other)
            return "other";
        break;
    case kABAddressProperty:
        if(subCategory == SubCategory.Street)
            return kABAddressStreetKey();
        if(subCategory == SubCategory.City)
            return kABAddressCityKey();
        if(subCategory == SubCategory.State)
            return kABAddressStateKey();
        if(subCategory == SubCategory.Country)
            return kABAddressCountryKey();
        if(subCategory == SubCategory.PostalCode)
            return kABAddressZIPKey();
        break;
    default: return null;
    }
    return null;
}
/**
 * Adds a new, still empty contact which will be filled in later.
 *
 * @param id The ID of the contact to add; ignored when null.
 */
public void addEmptyContact(String id)
{
    if (id == null)
        return;

    MacOSXAddrBookSourceContact sourceContact
        = new MacOSXAddrBookSourceContact(
                getContactSource(),
                null,
                new LinkedList<ContactDetail>());

    sourceContact.setData(SourceContact.DATA_ID, id);
    addQueryResult(sourceContact);
}
/**
 * Fires a contact changed event for the given contact, exposing the
 * protected <tt>fireContactChanged</tt> to collaborators of this query.
 *
 * @param sourceContact The contact which has changed.
 */
public void contactChanged(SourceContact sourceContact)
{
    fireContactChanged(sourceContact);
}
}
| |
package org.deeplearning4j.spark.api.stats;
import lombok.Data;
import org.apache.commons.io.FilenameUtils;
import org.apache.spark.SparkContext;
import org.deeplearning4j.spark.stats.EventStats;
import org.deeplearning4j.spark.stats.StatsUtils;
import java.io.IOException;
import java.util.*;
/**
* A {@link SparkTrainingStats} implementation for common stats functionality used by most workers
*
* @author Alex Black
*/
@Data
public class CommonSparkTrainingStats implements SparkTrainingStats {
// Delimiter used when exporting stats as delimited text.
public static final String DEFAULT_DELIMITER = ",";
// File names used when exporting the individual stat series.
public static final String FILENAME_TOTAL_TIME_STATS = "workerFlatMapTotalTimeMs.txt";
public static final String FILENAME_GET_INITIAL_MODEL_STATS = "workerFlatMapGetInitialModelTimeMs.txt";
public static final String FILENAME_DATASET_GET_TIME_STATS = "workerFlatMapDataSetGetTimesMs.txt";
public static final String FILENAME_PROCESS_MINIBATCH_TIME_STATS = "workerFlatMapProcessMiniBatchTimesMs.txt";
// Keys under which each stat series is exposed via getValue(String).
public static final String WORKER_FLAT_MAP_TOTAL_TIME_MS = "WorkerFlatMapTotalTimeMs";
public static final String WORKER_FLAT_MAP_GET_INITIAL_MODEL_TIME_MS = "WorkerFlatMapGetInitialModelTimeMs";
public static final String WORKER_FLAT_MAP_DATA_SET_GET_TIMES_MS = "WorkerFlatMapDataSetGetTimesMs";
public static final String WORKER_FLAT_MAP_PROCESS_MINI_BATCH_TIMES_MS = "WorkerFlatMapProcessMiniBatchTimesMs";
// Fixed, ordered set of the keys declared above.
// NOTE(review): never reassigned — could be declared final.
private static Set<String> columnNames =
                Collections.unmodifiableSet(new LinkedHashSet<>(Arrays.asList(WORKER_FLAT_MAP_TOTAL_TIME_MS,
                                WORKER_FLAT_MAP_GET_INITIAL_MODEL_TIME_MS, WORKER_FLAT_MAP_DATA_SET_GET_TIMES_MS,
                                WORKER_FLAT_MAP_PROCESS_MINI_BATCH_TIMES_MS)));
// Optional nested stats from the concrete training worker; may be null.
private SparkTrainingStats trainingWorkerSpecificStats;
// Per-event timing series; each may be null when never recorded
// (statsAsString() below explicitly tolerates null lists).
private List<EventStats> workerFlatMapTotalTimeMs;
private List<EventStats> workerFlatMapGetInitialModelTimeMs;
private List<EventStats> workerFlatMapDataSetGetTimesMs;
private List<EventStats> workerFlatMapProcessMiniBatchTimesMs;
/**
 * No-arg constructor; leaves all stat lists null.
 * NOTE(review): presumably required by a serialization framework — confirm.
 */
public CommonSparkTrainingStats() {
}
/**
 * Copies all stat series from the given {@code Builder}.
 * NOTE(review): the builder field is named {@code trainingMasterSpecificStats}
 * while this class stores it as {@code trainingWorkerSpecificStats} —
 * presumably historical naming; confirm against the Builder definition.
 */
private CommonSparkTrainingStats(Builder builder) {
    this.trainingWorkerSpecificStats = builder.trainingMasterSpecificStats;
    this.workerFlatMapTotalTimeMs = builder.workerFlatMapTotalTimeMs;
    this.workerFlatMapGetInitialModelTimeMs = builder.workerFlatMapGetInitialModelTimeMs;
    this.workerFlatMapDataSetGetTimesMs = builder.workerFlatMapDataSetGetTimesMs;
    this.workerFlatMapProcessMiniBatchTimesMs = builder.workerFlatMapProcessMiniBatchTimesMs;
}
/**
 * Returns the keys of all recorded stat series: the four fixed
 * WORKER_FLAT_MAP_* keys plus any keys of the nested worker-specific stats.
 *
 * @return an insertion-ordered, mutable set of stat keys
 */
@Override
public Set<String> getKeySet() {
    Set<String> keys = new LinkedHashSet<>(columnNames);
    if (trainingWorkerSpecificStats == null)
        return keys;
    keys.addAll(trainingWorkerSpecificStats.getKeySet());
    return keys;
}
/**
 * Returns the list of {@code EventStats} recorded under the given key,
 * delegating unknown keys to the nested worker-specific stats when present.
 *
 * @param key one of the WORKER_FLAT_MAP_* keys or a key understood by the
 *            nested stats
 * @return the recorded series (may be null if never recorded)
 * @throws IllegalArgumentException if the key is unknown
 */
@Override
public List<EventStats> getValue(String key) {
    switch (key) {
        case WORKER_FLAT_MAP_TOTAL_TIME_MS:
            return workerFlatMapTotalTimeMs;
        case WORKER_FLAT_MAP_GET_INITIAL_MODEL_TIME_MS:
            return workerFlatMapGetInitialModelTimeMs;
        case WORKER_FLAT_MAP_DATA_SET_GET_TIMES_MS:
            return workerFlatMapDataSetGetTimesMs;
        case WORKER_FLAT_MAP_PROCESS_MINI_BATCH_TIMES_MS:
            return workerFlatMapProcessMiniBatchTimesMs;
        default:
            if (trainingWorkerSpecificStats != null)
                return trainingWorkerSpecificStats.getValue(key);
            throw new IllegalArgumentException("Unknown key: \"" + key + "\"");
    }
}
/**
 * Returns a short, human-readable name for the given stat key, delegating
 * unknown keys to the nested worker-specific stats when present.
 *
 * @param key one of the WORKER_FLAT_MAP_* keys or a key understood by the
 *            nested stats
 * @return the short display name for the key
 * @throws IllegalArgumentException if the key is unknown
 */
@Override
public String getShortNameForKey(String key) {
    switch (key) {
        case WORKER_FLAT_MAP_TOTAL_TIME_MS:
            return "Total";
        case WORKER_FLAT_MAP_GET_INITIAL_MODEL_TIME_MS:
            return "GetInitModel";
        case WORKER_FLAT_MAP_DATA_SET_GET_TIMES_MS:
            return "GetDataSet";
        case WORKER_FLAT_MAP_PROCESS_MINI_BATCH_TIMES_MS:
            return "ProcessBatch";
        default:
            if (trainingWorkerSpecificStats != null)
                return trainingWorkerSpecificStats.getShortNameForKey(key);
            throw new IllegalArgumentException("Unknown key: \"" + key + "\"");
    }
}
/**
 * Whether the given stat should be plotted by default. Only the DataSet-get
 * timing is plotted; the remaining common keys are excluded (covered by worker
 * stats generally). Unknown keys are delegated to nested stats when present,
 * else excluded.
 *
 * @param key stat key to check (must not be null)
 */
@Override
public boolean defaultIncludeInPlots(String key) {
    // key.equals(CONSTANT) preserves the original switch's NPE for a null key.
    if (key.equals(WORKER_FLAT_MAP_DATA_SET_GET_TIMES_MS)) {
        return true;
    }
    boolean coveredElsewhere = key.equals(WORKER_FLAT_MAP_TOTAL_TIME_MS)
            || key.equals(WORKER_FLAT_MAP_GET_INITIAL_MODEL_TIME_MS)
            || key.equals(WORKER_FLAT_MAP_PROCESS_MINI_BATCH_TIMES_MS);
    if (coveredElsewhere) {
        return false; // Covered by worker stats generally
    }
    return trainingWorkerSpecificStats != null && trainingWorkerSpecificStats.defaultIncludeInPlots(key);
}
/**
 * Merges another {@link CommonSparkTrainingStats} into this one by appending its
 * per-worker timing lists onto this instance's lists.
 * <p>
 * Null-safe: a timing list that was never recorded (null) on either side no longer
 * causes a NullPointerException (the no-arg constructor and Builder both permit null
 * lists). When this instance's list is null, the other instance's list is adopted by
 * reference (no copy), matching the prior in-place {@code addAll} mutation semantics.
 *
 * @param other stats to merge; must be a CommonSparkTrainingStats
 * @throws IllegalArgumentException if other is not a CommonSparkTrainingStats
 * @throws IllegalStateException    if this instance has no nested stats but the
 *                                  other does (cannot merge asymmetric nesting)
 */
@Override
public void addOtherTrainingStats(SparkTrainingStats other) {
    if (!(other instanceof CommonSparkTrainingStats))
        throw new IllegalArgumentException(
                "Cannot add other training stats: not an instance of CommonSparkTrainingStats");
    CommonSparkTrainingStats o = (CommonSparkTrainingStats) other;
    workerFlatMapTotalTimeMs = mergeStatsLists(workerFlatMapTotalTimeMs, o.workerFlatMapTotalTimeMs);
    workerFlatMapGetInitialModelTimeMs =
            mergeStatsLists(workerFlatMapGetInitialModelTimeMs, o.workerFlatMapGetInitialModelTimeMs);
    workerFlatMapDataSetGetTimesMs =
            mergeStatsLists(workerFlatMapDataSetGetTimesMs, o.workerFlatMapDataSetGetTimesMs);
    workerFlatMapProcessMiniBatchTimesMs =
            mergeStatsLists(workerFlatMapProcessMiniBatchTimesMs, o.workerFlatMapProcessMiniBatchTimesMs);
    if (trainingWorkerSpecificStats != null)
        trainingWorkerSpecificStats.addOtherTrainingStats(o.trainingWorkerSpecificStats);
    else if (o.trainingWorkerSpecificStats != null)
        throw new IllegalStateException(
                "Cannot merge: training master specific stats is null in one, but not the other");
}

/** Appends {@code source} onto {@code target}, tolerating null on either side. */
private static List<EventStats> mergeStatsLists(List<EventStats> target, List<EventStats> source) {
    if (source == null)
        return target;
    if (target == null)
        return source; // adopt by reference — consistent with prior addAll aliasing behavior
    target.addAll(source);
    return target;
}
/**
 * @return the nested worker-specific stats, or null if none were attached
 */
@Override
public SparkTrainingStats getNestedTrainingStats() {
    return trainingWorkerSpecificStats;
}
/**
 * Renders all common timing stats as a human-readable multi-line string, one line
 * per stat key. A stats list that was never recorded (null) is rendered as "-".
 * Nested worker-specific stats, when present, are appended at the end.
 */
@Override
public String statsAsString() {
    StringBuilder sb = new StringBuilder();
    String f = SparkTrainingStats.DEFAULT_PRINT_FORMAT;
    appendDurationLine(sb, f, WORKER_FLAT_MAP_TOTAL_TIME_MS, workerFlatMapTotalTimeMs);
    appendDurationLine(sb, f, WORKER_FLAT_MAP_GET_INITIAL_MODEL_TIME_MS, workerFlatMapGetInitialModelTimeMs);
    appendDurationLine(sb, f, WORKER_FLAT_MAP_DATA_SET_GET_TIMES_MS, workerFlatMapDataSetGetTimesMs);
    appendDurationLine(sb, f, WORKER_FLAT_MAP_PROCESS_MINI_BATCH_TIMES_MS, workerFlatMapProcessMiniBatchTimesMs);
    if (trainingWorkerSpecificStats != null)
        sb.append(trainingWorkerSpecificStats.statsAsString()).append("\n");
    return sb.toString();
}

/**
 * Appends one "formatted key + durations" line to {@code sb}; renders "-" when the
 * stats list is null (never recorded).
 */
private static void appendDurationLine(StringBuilder sb, String format, String key, List<EventStats> stats) {
    sb.append(String.format(format, key));
    if (stats == null)
        sb.append("-\n");
    else
        sb.append(StatsUtils.getDurationAsString(stats, ",")).append("\n");
}
/**
 * Writes each timing-stats list to its own delimited file under {@code outputPath},
 * then lets the nested worker-specific stats (if any) export their own files.
 *
 * @param outputPath directory the stat files are written into
 * @param sc         Spark context used by the export utility
 * @throws IOException if writing any of the files fails
 */
@Override
public void exportStatFiles(String outputPath, SparkContext sc) throws IOException {
    String delim = DEFAULT_DELIMITER;
    // Total time stats (includes total example counts)
    StatsUtils.exportStats(workerFlatMapTotalTimeMs,
            FilenameUtils.concat(outputPath, FILENAME_TOTAL_TIME_STATS), delim, sc);
    // "Get initial model" stats
    StatsUtils.exportStats(workerFlatMapGetInitialModelTimeMs,
            FilenameUtils.concat(outputPath, FILENAME_GET_INITIAL_MODEL_STATS), delim, sc);
    // "DataSet get time" stats
    StatsUtils.exportStats(workerFlatMapDataSetGetTimesMs,
            FilenameUtils.concat(outputPath, FILENAME_DATASET_GET_TIME_STATS), delim, sc);
    // Process-minibatch time stats
    StatsUtils.exportStats(workerFlatMapProcessMiniBatchTimesMs,
            FilenameUtils.concat(outputPath, FILENAME_PROCESS_MINIBATCH_TIME_STATS), delim, sc);
    if (trainingWorkerSpecificStats != null) {
        trainingWorkerSpecificStats.exportStatFiles(outputPath, sc);
    }
}
/**
 * Fluent builder for {@link CommonSparkTrainingStats}. Every list is optional;
 * anything left unset stays null in the built instance.
 */
public static class Builder {
    private SparkTrainingStats trainingMasterSpecificStats;
    private List<EventStats> workerFlatMapTotalTimeMs;
    private List<EventStats> workerFlatMapGetInitialModelTimeMs;
    private List<EventStats> workerFlatMapDataSetGetTimesMs;
    private List<EventStats> workerFlatMapProcessMiniBatchTimesMs;

    /** Sets the nested training-master-specific stats. */
    public Builder trainingMasterSpecificStats(SparkTrainingStats stats) {
        this.trainingMasterSpecificStats = stats;
        return this;
    }

    /** Sets the total worker flatMap timing stats. */
    public Builder workerFlatMapTotalTimeMs(List<EventStats> stats) {
        this.workerFlatMapTotalTimeMs = stats;
        return this;
    }

    /** Sets the "get initial model" timing stats. */
    public Builder workerFlatMapGetInitialModelTimeMs(List<EventStats> stats) {
        this.workerFlatMapGetInitialModelTimeMs = stats;
        return this;
    }

    /** Sets the DataSet-get timing stats. */
    public Builder workerFlatMapDataSetGetTimesMs(List<EventStats> stats) {
        this.workerFlatMapDataSetGetTimesMs = stats;
        return this;
    }

    /** Sets the minibatch-processing timing stats. */
    public Builder workerFlatMapProcessMiniBatchTimesMs(List<EventStats> stats) {
        this.workerFlatMapProcessMiniBatchTimesMs = stats;
        return this;
    }

    /** @return a new CommonSparkTrainingStats holding the configured lists */
    public CommonSparkTrainingStats build() {
        return new CommonSparkTrainingStats(this);
    }
}
}
| |
/**
* Copyright (C) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.openshift.api.model;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.fabric8.kubernetes.api.model.Doneable;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.ObjectMeta;
import io.fabric8.kubernetes.internal.HasMetadataComparator;
import io.fabric8.kubernetes.model.annotation.ApiGroup;
import io.fabric8.kubernetes.model.annotation.ApiVersion;
import io.sundr.builder.annotations.Buildable;
import io.sundr.builder.annotations.Inline;
import io.sundr.transform.annotations.VelocityTransformation;
import io.sundr.transform.annotations.VelocityTransformations;
import lombok.EqualsAndHashCode;
import lombok.ToString;
import javax.annotation.Generated;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
*
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("org.jsonschema2pojo")
@JsonPropertyOrder({
"apiVersion",
"kind",
"metadata",
"labels",
"objects",
"parameters"
})
@JsonDeserialize(using = JsonDeserializer.None.class)
@EqualsAndHashCode
@ToString
@Buildable(editableEnabled = false, validationEnabled = true, generateBuilderPackage=true, builderPackage = "io.fabric8.kubernetes.api.builder", inline = @Inline(type = Doneable.class, prefix = "Doneable", value = "done"))
@ApiVersion("v1")
@ApiGroup("")
@VelocityTransformations({
@VelocityTransformation(value = "/manifest.vm", outputPath = "openshift.properties", gather = true)
})
public class Template implements HasMetadata {

    /**
     * API version of this resource. (Required)
     */
    @JsonProperty("apiVersion")
    @NotNull
    private String apiVersion = "v1";
    /**
     * Kind of this resource. (Required)
     */
    @JsonProperty("kind")
    @NotNull
    private java.lang.String kind = "Template";
    /**
     * Labels applied to the template itself.
     */
    @JsonProperty("labels")
    @Valid
    private Map<String, String> labels;
    /**
     * Standard object metadata.
     */
    @JsonProperty("metadata")
    @Valid
    private ObjectMeta metadata;
    /**
     * Resources contained in the template. (Required, at least one)
     */
    @JsonProperty("objects")
    @NotNull
    @Size(min = 1)
    private List<HasMetadata> objects = new ArrayList<HasMetadata>();
    /**
     * Parameters substituted into the template's objects.
     */
    @JsonProperty("parameters")
    @Valid
    private List<Parameter> parameters = new ArrayList<Parameter>();
    // Catch-all for JSON properties without a declared field.
    @JsonIgnore
    private Map<java.lang.String, java.lang.Object> additionalProperties = new HashMap<java.lang.String, java.lang.Object>();

    /**
     * No args constructor for use in serialization
     */
    public Template() {
    }

    /**
     * @param apiVersion API version of this resource
     * @param kind       resource kind ("Template")
     * @param labels     labels applied to the template
     * @param metadata   standard object metadata
     * @param objects    resources contained in the template
     * @param parameters parameters substituted into the objects
     */
    public Template(String apiVersion, java.lang.String kind, Map<String, String> labels, ObjectMeta metadata, List<HasMetadata> objects, List<Parameter> parameters) {
        this.apiVersion = apiVersion;
        this.kind = kind;
        this.labels = labels;
        this.metadata = metadata;
        this.parameters = parameters;
        this.objects = objects;
    }

    /**
     * (Required)
     *
     * @return The apiVersion
     */
    @JsonProperty("apiVersion")
    public String getApiVersion() {
        return apiVersion;
    }

    /**
     * (Required)
     *
     * @param apiVersion The apiVersion
     */
    @JsonProperty("apiVersion")
    public void setApiVersion(String apiVersion) {
        this.apiVersion = apiVersion;
    }

    /**
     * (Required)
     *
     * @return The kind
     */
    @JsonProperty("kind")
    public java.lang.String getKind() {
        return kind;
    }

    /**
     * (Required)
     *
     * @param kind The kind
     */
    @JsonProperty("kind")
    public void setKind(java.lang.String kind) {
        this.kind = kind;
    }

    /**
     * @return The labels
     */
    @JsonProperty("labels")
    public Map<String, String> getLabels() {
        return labels;
    }

    /**
     * @param labels The labels
     */
    @JsonProperty("labels")
    public void setLabels(Map<String, String> labels) {
        this.labels = labels;
    }

    /**
     * @return The metadata
     */
    @JsonProperty("metadata")
    public ObjectMeta getMetadata() {
        return metadata;
    }

    /**
     * @param metadata The metadata
     */
    @JsonProperty("metadata")
    public void setMetadata(ObjectMeta metadata) {
        this.metadata = metadata;
    }

    /**
     * Returns a NEW list containing the template's objects, sorted with
     * {@link HasMetadataComparator}; mutating the returned list does not affect
     * this template.
     *
     * @return The objects, sorted
     */
    @JsonProperty("objects")
    public List<HasMetadata> getObjects() {
        List<HasMetadata> sortedObjects = new ArrayList<>(objects);
        Collections.sort(sortedObjects, new HasMetadataComparator());
        return sortedObjects;
    }

    /**
     * @param objects The objects
     */
    // Fix: annotation was missing here while every other accessor carries it —
    // makes the Jackson property binding explicit and consistent.
    @JsonProperty("objects")
    public void setObjects(List<HasMetadata> objects) {
        this.objects = objects;
    }

    /**
     * @return The parameters
     */
    @JsonProperty("parameters")
    public List<Parameter> getParameters() {
        return parameters;
    }

    /**
     * @param parameters The parameters
     */
    @JsonProperty("parameters")
    public void setParameters(List<Parameter> parameters) {
        this.parameters = parameters;
    }

    /** @return properties received in JSON that have no declared field */
    @JsonAnyGetter
    public Map<java.lang.String, java.lang.Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    /** Stores a JSON property that has no declared field. */
    @JsonAnySetter
    public void setAdditionalProperty(java.lang.String name, java.lang.Object value) {
        this.additionalProperties.put(name, value);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.bookkeeper.tests.shaded;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import dlshade.org.apache.bookkeeper.conf.AbstractConfiguration;
import dlshade.org.apache.bookkeeper.conf.ServerConfiguration;
import dlshade.org.apache.bookkeeper.meta.AbstractZkLedgerManagerFactory;
import dlshade.org.apache.bookkeeper.meta.HierarchicalLedgerManagerFactory;
import dlshade.org.apache.bookkeeper.meta.LayoutManager;
import dlshade.org.apache.bookkeeper.meta.LedgerLayout;
import dlshade.org.apache.bookkeeper.meta.LedgerManagerFactory;
import dlshade.org.apache.bookkeeper.util.ReflectionUtils;
import java.io.IOException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
/**
* Test whether the distributedlog-core-shaded jar is generated correctly.
*/
@RunWith(PowerMockRunner.class)
@PrepareForTest({ AbstractZkLedgerManagerFactory.class, ReflectionUtils.class })
public class DistributedLogCoreShadedJarTest {

    // Unshaded class names must NOT resolve from this jar's classpath.
    @Test(expected = ClassNotFoundException.class)
    public void testProtobufIsShaded() throws Exception {
        Class.forName("com.google.protobuf.Message");
    }

    @Test
    public void testProtobufShadedPath() throws Exception {
        Class.forName("dlshade.com.google.protobuf.Message");
    }

    @Test(expected = ClassNotFoundException.class)
    public void testGuavaIsShaded() throws Exception {
        Class.forName("com.google.common.cache.Cache");
    }

    @Test
    public void testGuavaShadedPath() throws Exception {
        Class.forName("dlshade.com.google.common.cache.Cache");
        assertTrue(true);
    }

    @Test(expected = ClassNotFoundException.class)
    public void testZooKeeperIsShaded() throws Exception {
        Class.forName("org.apache.zookeeper.ZooKeeper");
    }

    @Test
    public void testZooKeeperShadedPath() throws Exception {
        Class.forName("dlshade.org.apache.zookeeper.ZooKeeper");
    }

    @Test(expected = ClassNotFoundException.class)
    public void testBookKeeperCommon() throws Exception {
        Class.forName("org.apache.bookkeeper.common.util.OrderedExecutor");
        assertTrue(true); // unreachable when Class.forName throws, as expected
    }

    @Test
    public void testBookKeeperCommonShade() throws Exception {
        Class.forName("dlshade.org.apache.bookkeeper.common.util.OrderedExecutor");
        assertTrue(true);
    }

    @Test(expected = ClassNotFoundException.class)
    public void testBookKeeperProto() throws Exception {
        Class.forName("org.apache.bookkeeper.proto.BookkeeperProtocol");
    }

    @Test
    public void testBookKeeperProtoShade() throws Exception {
        Class.forName("dlshade.org.apache.bookkeeper.proto.BookkeeperProtocol");
        assertTrue(true);
    }

    @Test(expected = ClassNotFoundException.class)
    public void testCirceChecksum() throws Exception {
        Class.forName("com.scurrilous.circe.checksum.Crc32cIntChecksum");
    }

    @Test
    public void testCirceChecksumShade() throws Exception {
        Class.forName("dlshade.com.scurrilous.circe.checksum.Crc32cIntChecksum");
        assertTrue(true);
    }

    // DistributedLog's own classes are intentionally NOT relocated.
    @Test
    public void testDistributedLogCommon() throws Exception {
        Class.forName("org.apache.distributedlog.common.concurrent.AsyncSemaphore");
        assertTrue(true);
    }

    @Test
    public void testDistributedLogProto() throws Exception {
        Class.forName("org.apache.distributedlog.DLSN");
        assertTrue(true);
    }

    @Test
    public void testDistributedLogCore() throws Exception {
        Class.forName("org.apache.distributedlog.api.AsyncLogReader");
        assertTrue(true);
    }

    @Test
    public void testShadeLedgerManagerFactoryWithoutConfiguredLedgerManagerClass() throws Exception {
        testShadeLedgerManagerFactoryAllowed(
            null,
            true);
    }

    @Test
    public void testShadeLedgerManagerFactoryWithConfiguredLedgerManagerClass() throws Exception {
        // NOTE(review): "Hirerchical" looks like a typo of "Hierarchical" — left as-is
        // since resolution of the shaded name is what is under test; confirm intent.
        testShadeLedgerManagerFactoryAllowed(
            "org.apache.bookkeeper.meta.HirerchicalLedgerManagerFactory",
            true);
    }

    @Test
    public void testShadeLedgerManagerFactoryDisallowedWithoutConfiguredLedgerManagerClass() throws Exception {
        testShadeLedgerManagerFactoryAllowed(
            null,
            false);
    }

    @Test
    public void testShadeLedgerManagerFactoryDisallowedWithConfiguredLedgerManagerClass() throws Exception {
        testShadeLedgerManagerFactoryAllowed(
            "org.apache.bookkeeper.meta.HirerchicalLedgerManagerFactory",
            false);
    }

    /**
     * Exercises {@code AbstractZkLedgerManagerFactory.newLedgerManagerFactory} with a
     * mocked layout/reflection layer: when {@code allowShaded} is true the mocked
     * factory must be returned and initialized; when false, instantiation must fail
     * with an IOException caused by ClassNotFoundException.
     */
    @SuppressWarnings("unchecked")
    private void testShadeLedgerManagerFactoryAllowed(String factoryClassName,
                                                      boolean allowShaded) throws Exception {
        ServerConfiguration conf = new ServerConfiguration();
        conf.setAllowShadedLedgerManagerFactoryClass(allowShaded);
        conf.setLedgerManagerFactoryClassName(factoryClassName);
        LayoutManager manager = mock(LayoutManager.class);
        LedgerLayout layout = new LedgerLayout(
            "org.apache.bookkeeper.meta.HierarchicalLedgerManagerFactory",
            HierarchicalLedgerManagerFactory.CUR_VERSION);
        when(manager.readLedgerLayout()).thenReturn(layout);
        LedgerManagerFactory factory = mock(LedgerManagerFactory.class);
        when(factory.initialize(any(AbstractConfiguration.class), same(manager), anyInt()))
            .thenReturn(factory);
        PowerMockito.mockStatic(ReflectionUtils.class);
        when(ReflectionUtils.newInstance(any(Class.class)))
            .thenReturn(factory);
        try {
            LedgerManagerFactory result = AbstractZkLedgerManagerFactory.newLedgerManagerFactory(
                conf, manager);
            if (allowShaded) {
                assertSame(factory, result);
                verify(factory, times(1))
                    .initialize(any(AbstractConfiguration.class), same(manager), anyInt());
            } else {
                fail("Should fail to instantiate ledger manager factory if allowShaded is false");
            }
        } catch (IOException ioe) {
            if (allowShaded) {
                // Fix: message previously read "...factory is allowShaded is true"
                fail("Should not fail to instantiate ledger manager factory if allowShaded is true");
            } else {
                assertTrue(ioe.getCause() instanceof ClassNotFoundException);
            }
        }
    }
}
| |
package com.github.error418.opennms.client;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Date;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import com.github.error418.opennms.client.exception.RequiredPropertyException;
import com.github.error418.opennms.client.parameter.collection.CustomParameterCollection;
import com.github.error418.opennms.client.transfer.Event;
import com.github.error418.opennms.client.transfer.LogMessage;
import com.github.error418.opennms.client.transfer.LogMessageDestination;
import com.github.error418.opennms.client.transfer.Severity;
import com.github.error418.opennms.client.transfer.adapter.DateAdapter;
public class OpenNmsEventBuilderTest {
Event event;
@Before
public void buildSpy() {
event = Mockito.spy(new Event());
}
@Test
public void testBuilderMappings() throws Exception {
OpenNmsEventBuilder builder = new OpenNmsEventBuilder(event);
builder.host("host");
Mockito.verify(event, Mockito.times(1)).setHost(Mockito.eq("host"));
builder.source("source");
Mockito.verify(event, Mockito.times(1)).setSource(Mockito.eq("source"));
Date now = new Date();
builder.time(now);
Mockito.verify(event, Mockito.times(1)).setTime(Mockito.eq(now));
Assert.assertEquals(now, event.getTime());
Assert.assertNotSame(now, event.getTime());
builder.service("service");
Mockito.verify(event, Mockito.times(1)).setService(Mockito.eq("service"));
builder.interfaceAddress("127.0.0.1");
Mockito.verify(event, Mockito.times(1)).setInterfaceAddress(Mockito.any(InetAddress.class));
Assert.assertEquals("127.0.0.1", event.getInterfaceAddress().getHostAddress());
builder.description("description");
Mockito.verify(event, Mockito.times(1)).setDescription(Mockito.eq("description"));
builder.logMessage("logmsg");
Mockito.verify(event, Mockito.times(1)).setLogMessage(Mockito.any(LogMessage.class));
Assert.assertEquals("logmsg", event.getLogMessage().getValue());
builder.logMessage("logmsg", LogMessageDestination.DISCARD_TRAPS);
Assert.assertEquals(LogMessageDestination.DISCARD_TRAPS, event.getLogMessage().getDestination());
builder.severity(Severity.INDETERMINATE);
Mockito.verify(event, Mockito.times(1)).setSeverity(Mockito.eq(Severity.INDETERMINATE));
builder.uei("uei");
Mockito.verify(event, Mockito.times(1)).setUei(Mockito.eq("uei"));
builder.nodeId(1);
Mockito.verify(event, Mockito.times(1)).setNodeId(Mockito.eq(1));
builder.operationInstruction("operinstruct");
Mockito.verify(event, Mockito.times(1)).setOperationInstruction(Mockito.eq("operinstruct"));
}
@Test
public void testParameterCollectionOrder() throws Exception {
OpenNmsEventBuilder builder = new OpenNmsEventBuilder(event);
builder.parameter(
new CustomParameterCollection()
.setThirdParameter("3")
.setSecondParameter(2)
.setFirstParameter("1")
);
Assert.assertEquals("1", event.getParameterList().get(0).getParameterValue().getValue());
Assert.assertEquals("2", event.getParameterList().get(1).getParameterValue().getValue());
Assert.assertEquals("3", event.getParameterList().get(2).getParameterValue().getValue());
}
//////////////////////////////////////////////////////////////////////////////////////////////
// Validation Tests
//////////////////////////////////////////////////////////////////////////////////////////////
@Test(expected = RequiredPropertyException.class)
public void testValidationOnDirectSend() throws Exception {
OpenNmsEventBuilder.create().send(InetAddress.getLoopbackAddress());
}
@Test
public void testValidationUeiIsNull() throws Exception {
try {
OpenNmsEventBuilder.create()
.uei(null)
.time(new Date())
.source("some source")
.send(InetAddress.getLoopbackAddress());
Assert.fail();
}
catch (RequiredPropertyException e) {
Assert.assertEquals(e.getPropertyName(), "UEI");
}
}
@Test
public void testValidationTimeIsNull() throws Exception {
try {
OpenNmsEventBuilder.create()
.uei("some uei")
.source("some source")
.time(null)
.send(InetAddress.getLoopbackAddress());
Assert.fail();
}
catch (RequiredPropertyException e) {
Assert.assertEquals(e.getPropertyName(), "time");
}
}
@Test
public void testValidationSourceIsNull() throws Exception {
try {
OpenNmsEventBuilder.create()
.uei("some uei")
.source(null)
.time(new Date())
.send(InetAddress.getLoopbackAddress());
Assert.fail();
}
catch (RequiredPropertyException e) {
Assert.assertEquals(e.getPropertyName(), "source");
}
}
//////////////////////////////////////////////////////////////////////////////////////////////
// XML Tests
//////////////////////////////////////////////////////////////////////////////////////////////
@Test
public void testXml() throws Exception {
final String host = "HOST";
final String source = "SOURCE";
final String service = "SERVICE";
final Date time = new Date();
final String interfaceAddress = "127.0.0.1";
final String description = "DESCRIPTION";
final String logMessage = "LOGMESSAGE";
final String uei = "UEI";
final int nodeId = 1337;
final String operInstruct = "OPERINSTRUCT";
final String parameterName1 = "testNumber";
final int parameterValue1 = 1337;
final String parameterName2 = "anotherTestNumber";
final double parameterValue2 = 23.0;
DateAdapter dateAdapter = new DateAdapter();
String xml = OpenNmsEventBuilder.create()
.host(host)
.source(source)
.time(time)
.service(service)
.interfaceAddress(interfaceAddress)
.description(description)
.logMessage(logMessage, LogMessageDestination.DISCARD_TRAPS)
.severity(Severity.INDETERMINATE)
.uei(uei)
.nodeId(nodeId)
.operationInstruction(operInstruct)
.parameter(parameterName1, parameterValue1)
.parameter(parameterName2, parameterValue2)
.getXmlString();
Assert.assertNotNull(xml);
Assert.assertThat(xml.length(), Matchers.greaterThan(0));
Assert.assertThat(xml, Matchers.startsWith("<?xml"));
Assert.assertThat(xml, Matchers.containsString(host));
Assert.assertThat(xml, Matchers.containsString(source));
Assert.assertThat(xml, Matchers.containsString(service));
Assert.assertThat(xml, Matchers.containsString(dateAdapter.marshal(time)));
Assert.assertThat(xml, Matchers.containsString(interfaceAddress));
Assert.assertThat(xml, Matchers.containsString(description));
Assert.assertThat(xml, Matchers.containsString(logMessage));
Assert.assertThat(xml, Matchers.containsString(uei));
Assert.assertThat(xml, Matchers.containsString(String.valueOf(nodeId)));
Assert.assertThat(xml, Matchers.containsString(operInstruct));
Assert.assertThat(xml, Matchers.containsString(parameterName1));
Assert.assertThat(xml, Matchers.containsString(parameterName2));
Assert.assertThat(xml, Matchers.containsString(String.valueOf(parameterValue1)));
Assert.assertThat(xml, Matchers.containsString(String.valueOf(parameterValue2)));
}
@Test(expected = UnknownHostException.class)
public void testInterfaceAddressException() throws Exception {
OpenNmsEventBuilder
.create()
.interfaceAddress("192.168.0.500");
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.actions;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.intention.EmptyIntentionAction;
import com.intellij.codeInsight.intention.HighPriorityAction;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.ex.InspectionToolWrapper;
import com.intellij.codeInspection.ex.LocalInspectionToolWrapper;
import com.intellij.codeInspection.ex.PerformFixesModalTask;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SequentialModalProgressTask;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* User: anna
* Date: 21-Feb-2006
*/
public class CleanupInspectionIntention implements IntentionAction, HighPriorityAction {
private final static Logger LOG = Logger.getInstance(CleanupInspectionIntention.class);
private final InspectionToolWrapper myToolWrapper;
private final Class myQuickfixClass;
private final String myText;
public CleanupInspectionIntention(@NotNull InspectionToolWrapper toolWrapper, @NotNull Class quickFixClass, String text) {
myToolWrapper = toolWrapper;
myQuickfixClass = quickFixClass;
myText = text;
}
@Override
@NotNull
public String getText() {
return InspectionsBundle.message("fix.all.inspection.problems.in.file", myToolWrapper.getDisplayName());
}
@Override
@NotNull
public String getFamilyName() {
return getText();
}
@Override
public void invoke(@NotNull final Project project, final Editor editor, final PsiFile file) throws IncorrectOperationException {
final List<ProblemDescriptor> descriptions =
ProgressManager.getInstance().runProcess(() -> {
InspectionManager inspectionManager = InspectionManager.getInstance(project);
return InspectionEngine.runInspectionOnFile(file, myToolWrapper, inspectionManager.createNewGlobalContext(false));
}, new EmptyProgressIndicator());
if (!descriptions.isEmpty() && !FileModificationService.getInstance().preparePsiElementForWrite(file)) return;
final AbstractPerformFixesTask fixesTask = applyFixes(project, "Apply Fixes", descriptions, myQuickfixClass);
if (!fixesTask.isApplicableFixFound()) {
HintManager.getInstance().showErrorHint(editor, "Unfortunately '" + myText + "' is currently not available for batch mode\n User interaction is required for each problem found");
}
}
public static AbstractPerformFixesTask applyFixes(@NotNull Project project,
@NotNull String presentationText,
@NotNull List<ProblemDescriptor> descriptions,
@Nullable Class quickfixClass) {
Collections.sort(descriptions, (o1, o2) -> {
final ProblemDescriptorBase d1 = (ProblemDescriptorBase)o1;
final ProblemDescriptorBase d2 = (ProblemDescriptorBase)o2;
final int elementsDiff = PsiUtilCore.compareElementsByPosition(d1.getPsiElement(), d2.getPsiElement());
if (elementsDiff == 0) {
return Comparing.compare(d1.getDescriptionTemplate(), d2.getDescriptionTemplate());
}
return -elementsDiff;
});
final SequentialModalProgressTask progressTask =
new SequentialModalProgressTask(project, presentationText, true);
final boolean isBatch = quickfixClass != null && BatchQuickFix.class.isAssignableFrom(quickfixClass);
final AbstractPerformFixesTask fixesTask = isBatch ?
new PerformBatchFixesTask(project, descriptions.toArray(ProblemDescriptor.EMPTY_ARRAY), progressTask, quickfixClass) :
new PerformFixesTask(project, descriptions.toArray(ProblemDescriptor.EMPTY_ARRAY), progressTask, quickfixClass);
CommandProcessor.getInstance().executeCommand(project, () -> {
CommandProcessor.getInstance().markCurrentCommandAsGlobal(project);
progressTask.setMinIterationTime(200);
progressTask.setTask(fixesTask);
ProgressManager.getInstance().run(progressTask);
}, presentationText, null);
return fixesTask;
}
@Override
public boolean isAvailable(@NotNull final Project project, final Editor editor, final PsiFile file) {
return myQuickfixClass != EmptyIntentionAction.class &&
!(myToolWrapper instanceof LocalInspectionToolWrapper && ((LocalInspectionToolWrapper)myToolWrapper).isUnfair());
}
@Override
public boolean startInWriteAction() {
return false;
}
private static abstract class AbstractPerformFixesTask extends PerformFixesModalTask {
private boolean myApplicableFixFound = false;
protected final Class myQuickfixClass;
public AbstractPerformFixesTask(@NotNull Project project,
@NotNull CommonProblemDescriptor[] descriptors,
@NotNull SequentialModalProgressTask task,
@Nullable Class quickfixClass) {
super(project, descriptors, task);
myQuickfixClass = quickfixClass;
}
protected abstract void collectFix(QuickFix fix, ProblemDescriptor descriptor, Project project);
@Override
protected final void applyFix(Project project, CommonProblemDescriptor descriptor) {
final QuickFix[] fixes = descriptor.getFixes();
if (fixes != null && fixes.length > 0) {
for (final QuickFix fix : fixes) {
if (fix != null && (myQuickfixClass == null || fix.getClass().isAssignableFrom(myQuickfixClass))) {
final ProblemDescriptor problemDescriptor = (ProblemDescriptor)descriptor;
final PsiElement element = problemDescriptor.getPsiElement();
if (element != null && element.isValid()) {
collectFix(fix, problemDescriptor, project);
myApplicableFixFound = true;
}
break;
}
}
}
}
public final boolean isApplicableFixFound() {
return myApplicableFixFound;
}
}
/**
 * Collects all matching descriptors during the progress pass, then applies
 * them in a single {@link BatchQuickFix#applyFix} call when the task is done.
 */
private static class PerformBatchFixesTask extends AbstractPerformFixesTask {
    private final List<ProblemDescriptor> myBatchModeDescriptors = new ArrayList<>();
    /** Guards against running the batch fix more than once from isDone(). */
    private boolean myApplied = false;

    public PerformBatchFixesTask(@NotNull Project project,
                                 @NotNull CommonProblemDescriptor[] descriptors,
                                 @NotNull SequentialModalProgressTask task,
                                 @NotNull Class quickfixClass) {
        super(project, descriptors, task, quickfixClass);
    }

    @Override
    protected void collectFix(QuickFix fix, ProblemDescriptor descriptor, Project project) {
        // Defer: just remember the descriptor; the fix runs in isDone().
        myBatchModeDescriptors.add(descriptor);
    }

    @Override
    public boolean isDone() {
        if (!super.isDone()) {
            return false;
        }
        if (!myApplied && !myBatchModeDescriptors.isEmpty()) {
            // Use the first collected descriptor to locate the batch fix
            // instance, then apply it to everything we gathered.
            final ProblemDescriptor representative = myBatchModeDescriptors.get(0);
            LOG.assertTrue(representative.getFixes() != null);
            for (QuickFix fix : representative.getFixes()) {
                if (fix != null && fix.getClass().isAssignableFrom(myQuickfixClass)) {
                    ((BatchQuickFix)fix).applyFix(myProject,
                                                  myBatchModeDescriptors.toArray(new ProblemDescriptor[myBatchModeDescriptors.size()]),
                                                  new ArrayList<>(),
                                                  null);
                    break;
                }
            }
            myApplied = true;
        }
        return true;
    }
}
/**
 * Applies each matching quick fix immediately, one descriptor at a time
 * (in contrast to PerformBatchFixesTask, which accumulates descriptors and
 * applies them in one batch call at the end).
 */
private static class PerformFixesTask extends AbstractPerformFixesTask {
    public PerformFixesTask(@NotNull Project project,
                            @NotNull CommonProblemDescriptor[] descriptors,
                            @NotNull SequentialModalProgressTask task,
                            @Nullable Class quickFixClass) {
        super(project, descriptors, task, quickFixClass);
    }

    @Override
    protected void collectFix(QuickFix fix, ProblemDescriptor descriptor, Project project) {
        // Eager application: no collection step.
        fix.applyFix(project, descriptor);
    }
}
}
| |
/*************************************************************************/
/* GodotView.java */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
package org.godotengine.godot;
import android.content.Context;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.content.ContextWrapper;
import android.view.InputDevice;
import android.hardware.input.InputManager;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
import org.godotengine.godot.input.InputManagerCompat;
import org.godotengine.godot.input.InputManagerCompat.InputDeviceListener;
/**
 * A simple GLSurfaceView sub-class that demonstrates how to perform
* OpenGL ES 2.0 rendering into a GL Surface. Note the following important
* details:
*
* - The class must use a custom context factory to enable 2.0 rendering.
* See ContextFactory class definition below.
*
* - The class must use a custom EGLConfigChooser to be able to select
* an EGLConfig that supports 2.0. This is done by providing a config
* specification to eglChooseConfig() that has the attribute
 *     EGL10.EGL_RENDERABLE_TYPE containing the EGL_OPENGL_ES2_BIT flag
* set. See ConfigChooser class definition below.
*
* - The class must select the surface's format, then choose an EGLConfig
* that matches it exactly (with regards to red/green/blue/alpha channels
* bit depths). Failure to do so would result in an EGL_BAD_MATCH error.
*/
public class GodotView extends GLSurfaceView implements InputDeviceListener {

	private static String TAG = "GodotView";
	private static final boolean DEBUG = false;

	// Engine-wide configuration. Static so the static nested helpers
	// (ContextFactory, ConfigChooser, Renderer) can read it.
	private static Context ctx;
	private static GodotIO io;
	private static boolean firsttime = true;  // cleared after the first surface resize
	private static boolean use_gl3 = false;   // request an OpenGL ES 3.0 context instead of 2.0
	private static boolean use_32 = false;    // request a 32-bit (8888) color surface

	private Godot activity;
	private InputManagerCompat mInputManager;

	/**
	 * Engine constructor: stores the requested GL configuration, registers
	 * for input-device (joystick) hotplug events and initializes the surface.
	 */
	public GodotView(Context context, GodotIO p_io, boolean p_use_gl3, boolean p_use_32_bits, Godot p_activity) {
		super(context);
		ctx = context;
		io = p_io;
		use_gl3 = p_use_gl3;
		use_32 = p_use_32_bits;
		activity = p_activity;

		if (!p_io.needsReloadHooks()) {
			//will only work on SDK 11+!!
			// Keep the EGL context alive across pause/resume so GL resources
			// do not need to be recreated.
			setPreserveEGLContextOnPause(true);
		}

		mInputManager = InputManagerCompat.Factory.getInputManager(this.getContext());
		mInputManager.registerInputDeviceListener(this, null);
		// Opaque surface, 16-bit depth buffer, no stencil.
		init(false, 16, 0);
	}

	/** Plain constructor; no engine IO or joystick wiring is set up. */
	public GodotView(Context context, boolean translucent, int depth, int stencil) {
		super(context);
		init(translucent, depth, stencil);
	}

	// All touch input is forwarded to the activity.
	@Override public boolean onTouchEvent (MotionEvent event) {
		return activity.gotTouchEvent(event);
	};

	/**
	 * Maps an Android gamepad key code to Godot's internal joystick button
	 * index. Codes without an explicit mapping fall back to the generic
	 * BUTTON_1..BUTTON_16 range starting at index 20.
	 */
	public int get_godot_button(int keyCode) {
		int button = 0;
		switch (keyCode) {
			case KeyEvent.KEYCODE_BUTTON_A: // Android A is SNES B
				button = 0;
				break;
			case KeyEvent.KEYCODE_BUTTON_B:
				button = 1;
				break;
			case KeyEvent.KEYCODE_BUTTON_X: // Android X is SNES Y
				button = 2;
				break;
			case KeyEvent.KEYCODE_BUTTON_Y:
				button = 3;
				break;
			case KeyEvent.KEYCODE_BUTTON_L1:
				button = 9;
				break;
			case KeyEvent.KEYCODE_BUTTON_L2:
				button = 15;
				break;
			case KeyEvent.KEYCODE_BUTTON_R1:
				button = 10;
				break;
			case KeyEvent.KEYCODE_BUTTON_R2:
				button = 16;
				break;
			case KeyEvent.KEYCODE_BUTTON_SELECT:
				button = 4;
				break;
			case KeyEvent.KEYCODE_BUTTON_START:
				button = 6;
				break;
			case KeyEvent.KEYCODE_BUTTON_THUMBL:
				button = 7;
				break;
			case KeyEvent.KEYCODE_BUTTON_THUMBR:
				button = 8;
				break;
			case KeyEvent.KEYCODE_DPAD_UP:
				button = 11;
				break;
			case KeyEvent.KEYCODE_DPAD_DOWN:
				button = 12;
				break;
			case KeyEvent.KEYCODE_DPAD_LEFT:
				button = 13;
				break;
			case KeyEvent.KEYCODE_DPAD_RIGHT:
				button = 14;
				break;
			case KeyEvent.KEYCODE_BUTTON_C:
				button = 17;
				break;
			case KeyEvent.KEYCODE_BUTTON_Z:
				button = 18;
				break;
			default:
				button = keyCode - KeyEvent.KEYCODE_BUTTON_1 + 20;
				break;
		};
		return button;
	};

	// Cached description of one connected game controller.
	private static class joystick {
		public int device_id;                            // Android InputDevice id
		public String name;
		public ArrayList<InputDevice.MotionRange> axes;  // analog axes, sorted by axis id
		public ArrayList<InputDevice.MotionRange> hats;  // HAT_X / HAT_Y (d-pad) ranges
	}

	// Orders motion ranges by axis identifier so the engine-facing axis
	// indices are stable across calls.
	private static class RangeComparator implements Comparator<InputDevice.MotionRange> {
		@Override
		public int compare(InputDevice.MotionRange arg0, InputDevice.MotionRange arg1) {
			return arg0.getAxis() - arg1.getAxis();
		}
	}

	// Joysticks seen so far; list position doubles as the engine device id.
	ArrayList<joystick> joy_devices = new ArrayList<joystick>();

	/**
	 * Returns the index of the given Android device id in joy_devices,
	 * registering the device on the fly if it has not been seen before.
	 */
	private int find_joy_device(int device_id) {
		for (int i=0; i<joy_devices.size(); i++) {
			if (joy_devices.get(i).device_id == device_id) {
				return i;
			}
		}
		// Unknown device: register it; it becomes the last list entry.
		onInputDeviceAdded(device_id);
		return joy_devices.size() - 1;
	}

	/**
	 * Records the device's motion ranges (hat axes separated from regular
	 * analog axes) and notifies the native engine of the connection.
	 */
	@Override public void onInputDeviceAdded(int deviceId) {
		joystick joy = new joystick();
		joy.device_id = deviceId;
		int id = joy_devices.size(); // index the new entry will occupy
		InputDevice device = mInputManager.getInputDevice(deviceId);
		joy.name = device.getName();
		joy.axes = new ArrayList<InputDevice.MotionRange>();
		joy.hats = new ArrayList<InputDevice.MotionRange>();
		List<InputDevice.MotionRange> ranges = device.getMotionRanges();
		Collections.sort(ranges, new RangeComparator());
		for (InputDevice.MotionRange range : ranges) {
			if (range.getAxis() == MotionEvent.AXIS_HAT_X || range.getAxis() == MotionEvent.AXIS_HAT_Y) {
				joy.hats.add(range);
			}
			else {
				joy.axes.add(range);
			}
		}
		joy_devices.add(joy);
		GodotLib.joyconnectionchanged(id, true, joy.name);
	}

	// Drops the device from the list and tells the engine it disconnected.
	// NOTE(review): removing shifts later indices, so engine device ids of
	// remaining joysticks change — assumed acceptable here; verify.
	@Override public void onInputDeviceRemoved(int deviceId) {
		int id = find_joy_device(deviceId);
		joy_devices.remove(id);
		GodotLib.joyconnectionchanged(id, false, "");
	}

	@Override public void onInputDeviceChanged(int deviceId) {
		// Intentionally empty: device reconfiguration is not tracked.
	}

	/**
	 * Key releases: back is swallowed, volume keys go to the system,
	 * gamepad-sourced keys become joystick button events, and everything else
	 * is forwarded as a regular key to the native engine.
	 */
	@Override public boolean onKeyUp(int keyCode, KeyEvent event) {
		if (keyCode == KeyEvent.KEYCODE_BACK) {
			return true;
		}
		if (keyCode == KeyEvent.KEYCODE_VOLUME_UP || keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
			return super.onKeyUp(keyCode, event);
		};

		int source = event.getSource();
		if ((source & InputDevice.SOURCE_JOYSTICK) != 0 || (source & InputDevice.SOURCE_DPAD) != 0 || (source & InputDevice.SOURCE_GAMEPAD) != 0) {
			int button = get_godot_button(keyCode);
			int device = find_joy_device(event.getDeviceId());
			GodotLib.joybutton(device, button, false);
			return true;
		} else {
			// metaState 0: unicode char without modifier keys applied.
			GodotLib.key(keyCode, event.getUnicodeChar(0), false);
		};

		return super.onKeyUp(keyCode, event);
	};

	/**
	 * Key presses: back triggers an engine quit request (but is not passed
	 * on), volume keys go to the system, gamepad keys become joystick button
	 * events (key echo ignored), others are forwarded as regular keys.
	 */
	@Override public boolean onKeyDown(int keyCode, KeyEvent event) {
		if (keyCode == KeyEvent.KEYCODE_BACK) {
			GodotLib.quit();
			// press 'back' button should not terminate program
			//normal handle 'back' event in game logic
			return true;
		}
		if (keyCode == KeyEvent.KEYCODE_VOLUME_UP || keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
			return super.onKeyDown(keyCode, event);
		};

		int source = event.getSource();
		//Log.e(TAG, String.format("Key down! source %d, device %d, joystick %d, %d, %d", event.getDeviceId(), source, (source & InputDevice.SOURCE_JOYSTICK), (source & InputDevice.SOURCE_DPAD), (source & InputDevice.SOURCE_GAMEPAD)));

		if ((source & InputDevice.SOURCE_JOYSTICK) != 0 || (source & InputDevice.SOURCE_DPAD) != 0 || (source & InputDevice.SOURCE_GAMEPAD) != 0) {
			if (event.getRepeatCount() > 0) // ignore key echo
				return true;
			int button = get_godot_button(keyCode);
			int device = find_joy_device(event.getDeviceId());

			//Log.e(TAG, String.format("joy button down! button %x, %d, device %d", keyCode, button, device));
			GodotLib.joybutton(device, button, true);
			return true;
		} else {
			GodotLib.key(keyCode, event.getUnicodeChar(0), true);
		};

		return super.onKeyDown(keyCode, event);
	}

	/**
	 * Joystick motion: each analog axis is rescaled from its reported
	 * [min, max] range into [-1, 1]; hat axes are read in X/Y pairs and
	 * rounded to integer directions. Values are forwarded to the engine.
	 */
	@Override public boolean onGenericMotionEvent(MotionEvent event) {
		if ((event.getSource() & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK && event.getAction() == MotionEvent.ACTION_MOVE) {
			int device_id = find_joy_device(event.getDeviceId());
			joystick joy = joy_devices.get(device_id);

			for (int i = 0; i < joy.axes.size(); i++) {
				InputDevice.MotionRange range = joy.axes.get(i);
				// Normalize from [min, max] to [-1, 1].
				float value = (event.getAxisValue(range.getAxis()) - range.getMin() ) / range.getRange() * 2.0f - 1.0f;
				//Log.e(TAG, String.format("axis event: %d, value %f", i, value));
				GodotLib.joyaxis(device_id, i, value);
			}

			// Hat ranges were collected in sorted order, so they come in
			// consecutive X/Y pairs (see onInputDeviceAdded).
			for (int i = 0; i < joy.hats.size(); i+=2) {
				int hatX = Math.round(event.getAxisValue(joy.hats.get(i).getAxis()));
				int hatY = Math.round(event.getAxisValue(joy.hats.get(i+1).getAxis()));
				//Log.e(TAG, String.format("HAT EVENT %d, %d", hatX, hatY));
				GodotLib.joyhat(device_id, hatX, hatY);
			}
			return true;
		};

		return super.onGenericMotionEvent(event);
	};

	/**
	 * Common surface setup: surface format, EGL context factory, EGL config
	 * chooser (with 32-bit fallback when use_32 is set) and the renderer.
	 */
	private void init(boolean translucent, int depth, int stencil) {
		this.setFocusableInTouchMode(true);
		/* By default, GLSurfaceView() creates a RGB_565 opaque surface.
		 * If we want a translucent one, we should change the surface's
		 * format here, using PixelFormat.TRANSLUCENT for GL Surfaces
		 * is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
		 */
		if (translucent) {
			this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
		}

		/* Setup the context factory for 2.0 rendering.
		 * See ContextFactory class definition below
		 */
		setEGLContextFactory(new ContextFactory());

		/* We need to choose an EGLConfig that matches the format of
		 * our surface exactly. This is going to be done in our
		 * custom config chooser. See ConfigChooser class definition
		 * below.
		 */
		if (use_32) {
			// Prefer 8888; FallbackConfigChooser degrades to the wrapped
			// chooser (and clears use_32) if no 32-bit config exists.
			setEGLConfigChooser( translucent ?
					new FallbackConfigChooser(8, 8, 8, 8, 24, stencil, new ConfigChooser(8, 8, 8, 8, 16, stencil)) :
					new FallbackConfigChooser(8, 8, 8, 8, 24, stencil, new ConfigChooser(5, 6, 5, 0, 16, stencil)) );
		} else {
			setEGLConfigChooser( translucent ?
					new ConfigChooser(8, 8, 8, 8, 16, stencil) :
					new ConfigChooser(5, 6, 5, 0, 16, stencil) );
		}

		/* Set the renderer responsible for frame rendering */
		setRenderer(new Renderer());
	}

	/**
	 * Creates the EGL rendering context, requesting an ES 2.0 or ES 3.0
	 * client version depending on the static use_gl3 flag.
	 */
	private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
		private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
		public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
			if (use_gl3)
				Log.w(TAG, "creating OpenGL ES 3.0 context :");
			else
				Log.w(TAG, "creating OpenGL ES 2.0 context :");

			checkEglError("Before eglCreateContext", egl);
			int[] attrib_list2 = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
			int[] attrib_list3 = {EGL_CONTEXT_CLIENT_VERSION, 3, EGL10.EGL_NONE };
			EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, use_gl3?attrib_list3:attrib_list2);
			checkEglError("After eglCreateContext", egl);
			return context;
		}

		public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
			egl.eglDestroyContext(display, context);
		}
	}

	// Logs (and thereby drains) every pending EGL error.
	private static void checkEglError(String prompt, EGL10 egl) {
		int error;
		while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
			Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
		}
	}

	/* Fallback if 32bit View is not supported*/
	// Tries the primary (32-bit) spec first; when nothing matches, retries
	// with the wrapped fallback chooser and clears the global use_32 flag so
	// the renderer knows the surface ended up 16-bit.
	private static class FallbackConfigChooser extends ConfigChooser {
		private ConfigChooser fallback;

		public FallbackConfigChooser(int r, int g, int b, int a, int depth, int stencil, ConfigChooser fallback) {
			super(r, g, b, a, depth, stencil);
			this.fallback = fallback;
		}

		@Override
		public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
			EGLConfig ec = super.chooseConfig(egl, display, configs);
			if (ec == null) {
				Log.w(TAG, "Trying ConfigChooser fallback");
				ec = fallback.chooseConfig(egl, display, configs);
				use_32 = false;
			}
			return ec;
		}
	}

	/**
	 * Picks an EGLConfig matching the requested channel sizes exactly and
	 * meeting minimum depth/stencil sizes; returns null if none matches.
	 */
	private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {

		public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
			mRedSize = r;
			mGreenSize = g;
			mBlueSize = b;
			mAlphaSize = a;
			mDepthSize = depth;
			mStencilSize = stencil;
		}

		/* This EGL config specification is used to specify 2.0 rendering.
		 * We use a minimum size of 4 bits for red/green/blue, but will
		 * perform actual matching in chooseConfig() below.
		 */
		private static int EGL_OPENGL_ES2_BIT = 4;
		private static int[] s_configAttribs2 =
		{
			EGL10.EGL_RED_SIZE, 4,
			EGL10.EGL_GREEN_SIZE, 4,
			EGL10.EGL_BLUE_SIZE, 4,
			// EGL10.EGL_DEPTH_SIZE, 16,
			// EGL10.EGL_STENCIL_SIZE, EGL10.EGL_DONT_CARE,
			EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
			EGL10.EGL_NONE
		};
		private static int[] s_configAttribs3 =
		{
			EGL10.EGL_RED_SIZE, 4,
			EGL10.EGL_GREEN_SIZE, 4,
			EGL10.EGL_BLUE_SIZE, 4,
			// EGL10.EGL_DEPTH_SIZE, 16,
			// EGL10.EGL_STENCIL_SIZE, EGL10.EGL_DONT_CARE,
			EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, //apparently there is no EGL_OPENGL_ES3_BIT
			EGL10.EGL_NONE
		};

		public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {

			/* Get the number of minimally matching EGL configurations
			 */
			int[] num_config = new int[1];
			egl.eglChooseConfig(display, use_gl3?s_configAttribs3:s_configAttribs2, null, 0, num_config);

			int numConfigs = num_config[0];

			if (numConfigs <= 0) {
				throw new IllegalArgumentException("No configs match configSpec");
			}

			/* Allocate then read the array of minimally matching EGL configs
			 */
			EGLConfig[] configs = new EGLConfig[numConfigs];
			egl.eglChooseConfig(display, use_gl3?s_configAttribs3:s_configAttribs2, configs, numConfigs, num_config);

			if (DEBUG) {
				printConfigs(egl, display, configs);
			}
			/* Now return the "best" one
			 */
			return chooseConfig(egl, display, configs);
		}

		// Exact match on R/G/B/A bit depths, minimum on depth/stencil.
		public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
				EGLConfig[] configs) {
			for(EGLConfig config : configs) {
				int d = findConfigAttrib(egl, display, config,
						EGL10.EGL_DEPTH_SIZE, 0);
				int s = findConfigAttrib(egl, display, config,
						EGL10.EGL_STENCIL_SIZE, 0);

				// We need at least mDepthSize and mStencilSize bits
				if (d < mDepthSize || s < mStencilSize)
					continue;

				// We want an *exact* match for red/green/blue/alpha
				int r = findConfigAttrib(egl, display, config,
						EGL10.EGL_RED_SIZE, 0);
				int g = findConfigAttrib(egl, display, config,
						EGL10.EGL_GREEN_SIZE, 0);
				int b = findConfigAttrib(egl, display, config,
						EGL10.EGL_BLUE_SIZE, 0);
				int a = findConfigAttrib(egl, display, config,
						EGL10.EGL_ALPHA_SIZE, 0);

				if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
					return config;
			}
			return null;
		}

		// Reads one attribute of a config; returns defaultValue on failure.
		private int findConfigAttrib(EGL10 egl, EGLDisplay display,
				EGLConfig config, int attribute, int defaultValue) {

			if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
				return mValue[0];
			}
			return defaultValue;
		}

		// Debug helper: dumps every candidate config (DEBUG builds only).
		private void printConfigs(EGL10 egl, EGLDisplay display,
				EGLConfig[] configs) {
			int numConfigs = configs.length;
			Log.w(TAG, String.format("%d configurations", numConfigs));
			for (int i = 0; i < numConfigs; i++) {
				Log.w(TAG, String.format("Configuration %d:\n", i));
				printConfig(egl, display, configs[i]);
			}
		}

		// Debug helper: dumps all known attributes of a single config.
		private void printConfig(EGL10 egl, EGLDisplay display,
				EGLConfig config) {
			int[] attributes = {
					EGL10.EGL_BUFFER_SIZE,
					EGL10.EGL_ALPHA_SIZE,
					EGL10.EGL_BLUE_SIZE,
					EGL10.EGL_GREEN_SIZE,
					EGL10.EGL_RED_SIZE,
					EGL10.EGL_DEPTH_SIZE,
					EGL10.EGL_STENCIL_SIZE,
					EGL10.EGL_CONFIG_CAVEAT,
					EGL10.EGL_CONFIG_ID,
					EGL10.EGL_LEVEL,
					EGL10.EGL_MAX_PBUFFER_HEIGHT,
					EGL10.EGL_MAX_PBUFFER_PIXELS,
					EGL10.EGL_MAX_PBUFFER_WIDTH,
					EGL10.EGL_NATIVE_RENDERABLE,
					EGL10.EGL_NATIVE_VISUAL_ID,
					EGL10.EGL_NATIVE_VISUAL_TYPE,
					0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
					EGL10.EGL_SAMPLES,
					EGL10.EGL_SAMPLE_BUFFERS,
					EGL10.EGL_SURFACE_TYPE,
					EGL10.EGL_TRANSPARENT_TYPE,
					EGL10.EGL_TRANSPARENT_RED_VALUE,
					EGL10.EGL_TRANSPARENT_GREEN_VALUE,
					EGL10.EGL_TRANSPARENT_BLUE_VALUE,
					0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
					0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
					0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
					0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
					EGL10.EGL_LUMINANCE_SIZE,
					EGL10.EGL_ALPHA_MASK_SIZE,
					EGL10.EGL_COLOR_BUFFER_TYPE,
					EGL10.EGL_RENDERABLE_TYPE,
					0x3042 // EGL10.EGL_CONFORMANT
			};
			String[] names = {
					"EGL_BUFFER_SIZE",
					"EGL_ALPHA_SIZE",
					"EGL_BLUE_SIZE",
					"EGL_GREEN_SIZE",
					"EGL_RED_SIZE",
					"EGL_DEPTH_SIZE",
					"EGL_STENCIL_SIZE",
					"EGL_CONFIG_CAVEAT",
					"EGL_CONFIG_ID",
					"EGL_LEVEL",
					"EGL_MAX_PBUFFER_HEIGHT",
					"EGL_MAX_PBUFFER_PIXELS",
					"EGL_MAX_PBUFFER_WIDTH",
					"EGL_NATIVE_RENDERABLE",
					"EGL_NATIVE_VISUAL_ID",
					"EGL_NATIVE_VISUAL_TYPE",
					"EGL_PRESERVED_RESOURCES",
					"EGL_SAMPLES",
					"EGL_SAMPLE_BUFFERS",
					"EGL_SURFACE_TYPE",
					"EGL_TRANSPARENT_TYPE",
					"EGL_TRANSPARENT_RED_VALUE",
					"EGL_TRANSPARENT_GREEN_VALUE",
					"EGL_TRANSPARENT_BLUE_VALUE",
					"EGL_BIND_TO_TEXTURE_RGB",
					"EGL_BIND_TO_TEXTURE_RGBA",
					"EGL_MIN_SWAP_INTERVAL",
					"EGL_MAX_SWAP_INTERVAL",
					"EGL_LUMINANCE_SIZE",
					"EGL_ALPHA_MASK_SIZE",
					"EGL_COLOR_BUFFER_TYPE",
					"EGL_RENDERABLE_TYPE",
					"EGL_CONFORMANT"
			};
			int[] value = new int[1];
			for (int i = 0; i < attributes.length; i++) {
				int attribute = attributes[i];
				String name = names[i];
				if ( egl.eglGetConfigAttrib(display, config, attribute, value)) {
					Log.w(TAG, String.format("  %s: %d\n", name, value[0]));
				} else {
					// Log.w(TAG, String.format("  %s: failed\n", name));
					// Drain the error queue for unsupported attributes.
					while (egl.eglGetError() != EGL10.EGL_SUCCESS);
				}
			}
		}

		// Subclasses can adjust these values:
		protected int mRedSize;
		protected int mGreenSize;
		protected int mBlueSize;
		protected int mAlphaSize;
		protected int mDepthSize;
		protected int mStencilSize;
		private int[] mValue = new int[1]; // scratch buffer for attribute reads
	}

	/**
	 * Drives the native engine: one engine step per frame, with surface
	 * size/context callbacks forwarded to the engine and to every registered
	 * Godot singleton.
	 */
	private static class Renderer implements GLSurfaceView.Renderer {

		public void onDrawFrame(GL10 gl) {
			GodotLib.step();
			for(int i=0;i<Godot.singleton_count;i++) {
				Godot.singletons[i].onGLDrawFrame(gl);
			}
		}

		public void onSurfaceChanged(GL10 gl, int width, int height) {
			// firsttime distinguishes the initial surface from later resizes.
			GodotLib.resize(width, height,!firsttime);
			firsttime=false;
			for(int i=0;i<Godot.singleton_count;i++) {
				Godot.singletons[i].onGLSurfaceChanged(gl, width, height);
			}
		}

		public void onSurfaceCreated(GL10 gl, EGLConfig config) {
			GodotLib.newcontext(use_32);
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.tests.pushruntime;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.io.FileUtils;
import org.apache.hyracks.algebricks.data.IPrinterFactory;
import org.apache.hyracks.algebricks.data.impl.BinaryBooleanInspectorImpl;
import org.apache.hyracks.algebricks.data.impl.BinaryIntegerInspectorImpl;
import org.apache.hyracks.algebricks.data.impl.IntegerPrinterFactory;
import org.apache.hyracks.algebricks.data.impl.NoopMissingWriterFactory;
import org.apache.hyracks.algebricks.data.impl.UTF8StringPrinterFactory;
import org.apache.hyracks.algebricks.runtime.aggregators.TupleCountAggregateFunctionFactory;
import org.apache.hyracks.algebricks.runtime.aggregators.TupleCountRunningAggregateFunctionFactory;
import org.apache.hyracks.algebricks.runtime.base.AlgebricksPipeline;
import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory;
import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.base.IRunningAggregateEvaluatorFactory;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.algebricks.runtime.base.IUnnestingEvaluatorFactory;
import org.apache.hyracks.algebricks.runtime.evaluators.TupleFieldEvaluatorFactory;
import org.apache.hyracks.algebricks.runtime.operators.aggreg.AggregateRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.aggreg.NestedPlansAccumulatingAggregatorFactory;
import org.apache.hyracks.algebricks.runtime.operators.group.MicroPreClusteredGroupRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
import org.apache.hyracks.algebricks.runtime.operators.meta.SubplanRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.sort.InMemorySortRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.PrinterRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.RunningAggregateRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.SplitOperatorDescriptor;
import org.apache.hyracks.algebricks.runtime.operators.std.StreamLimitRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.StringStreamingRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.std.UnnestRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
import org.apache.hyracks.algebricks.tests.util.AlgebricksHyracksIntegrationUtil;
import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.IMissingWriterFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.FileSplit;
import org.apache.hyracks.api.io.ManagedFileSplit;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.control.nc.NodeControllerService;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
import org.apache.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.parsers.FloatParserFactory;
import org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory;
import org.apache.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
import org.apache.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
import org.apache.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
import org.apache.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
import org.apache.hyracks.dataflow.std.file.LineFileWriteOperatorDescriptor;
import org.apache.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor;
import org.apache.hyracks.dataflow.std.misc.ReplicateOperatorDescriptor;
import org.apache.hyracks.dataflow.std.sort.InMemorySortOperatorDescriptor;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import junit.framework.Assert;
/**
 * Integration tests for Algebricks push-based runtime operators: each test
 * builds a small Hyracks job from runtime factories and runs it on the
 * in-process integration cluster.
 */
public class PushRuntimeTest {

    private static final String SEPARATOR = System.getProperty("file.separator");
    // Directory that test jobs write their actual output files into.
    private static final String PATH_ACTUAL = "rttest";
    private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources";
    // Expected-results directory under the test resources tree.
    private static final String PATH_EXPECTED = PATH_BASE + SEPARATOR + "results";
    private static final int FRAME_SIZE = 32768;
    // Default node constraint: everything runs on node controller NC1.
    private static final String[] DEFAULT_NODES = new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID };
    // Counter used by tests to generate unique values/names.
    private static final AtomicInteger aInteger = new AtomicInteger(0);
/** Creates the output directory and boots the in-process Hyracks cluster. */
@BeforeClass
public static void setUp() throws Exception {
    File actualDir = new File(PATH_ACTUAL);
    actualDir.mkdirs();
    AlgebricksHyracksIntegrationUtil.init();
}
/**
 * Shuts the cluster down and removes the output directory — but only when it
 * is empty, so leftover files from failed tests remain inspectable.
 */
@AfterClass
public static void tearDown() throws Exception {
    AlgebricksHyracksIntegrationUtil.deinit();
    File actualDir = new File(PATH_ACTUAL);
    File[] leftovers = actualDir.listFiles();
    boolean isEmpty = leftovers == null || leftovers.length == 0;
    if (isEmpty) {
        actualDir.delete();
    }
}
/**
 * Pipeline: empty-tuple-source -> assign(400, 3) -> printer.
 * Only verifies that the job runs to completion; nothing is checked on disk.
 */
@Test
public void etsAssignPrint() throws Exception {
    JobSpecification jobSpec = new JobSpecification(FRAME_SIZE);

    EmptyTupleSourceRuntimeFactory source = new EmptyTupleSourceRuntimeFactory();
    RecordDescriptor sourceDesc = new RecordDescriptor(new ISerializerDeserializer[] {});

    // Assign the two integer constants 400 and 3 into fields 0 and 1.
    AssignRuntimeFactory assign = new AssignRuntimeFactory(new int[] { 0, 1 },
            new IScalarEvaluatorFactory[] { new IntegerConstantEvalFactory(400), new IntegerConstantEvalFactory(3) },
            new int[] { 0, 1 });
    RecordDescriptor assignDesc = new RecordDescriptor(new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });

    PrinterRuntimeFactory printer = new PrinterRuntimeFactory(new int[] { 0, 1 },
            new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE, IntegerPrinterFactory.INSTANCE }, assignDesc);

    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(jobSpec, 0, 0,
            new IPushRuntimeFactory[] { source, assign, printer },
            new RecordDescriptor[] { sourceDesc, assignDesc, null });
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, algebricksOp, DEFAULT_NODES);
    jobSpec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(jobSpec);
}
/**
 * Pipeline: empty-tuple-source -> assign(400, 3) -> sink writer.
 * Verifies the written file contains exactly "400; 3".
 */
@Test
public void etsAssignWrite() throws Exception {
    JobSpecification jobSpec = new JobSpecification(FRAME_SIZE);

    EmptyTupleSourceRuntimeFactory source = new EmptyTupleSourceRuntimeFactory();
    RecordDescriptor sourceDesc = new RecordDescriptor(new ISerializerDeserializer[] {});

    AssignRuntimeFactory assign = new AssignRuntimeFactory(new int[] { 0, 1 },
            new IScalarEvaluatorFactory[] { new IntegerConstantEvalFactory(400), new IntegerConstantEvalFactory(3) },
            new int[] { 0, 1 });
    RecordDescriptor assignDesc = new RecordDescriptor(new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });

    File outFile = new File(PATH_ACTUAL + SEPARATOR + "etsAssignWrite.out");
    SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0, 1 },
            new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE, IntegerPrinterFactory.INSTANCE }, outFile,
            PrinterBasedWriterFactory.INSTANCE, assignDesc);

    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(jobSpec, 0, 0,
            new IPushRuntimeFactory[] { source, assign, writer },
            new RecordDescriptor[] { sourceDesc, assignDesc, null });
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, algebricksOp, DEFAULT_NODES);
    jobSpec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(jobSpec);

    StringBuilder result = new StringBuilder();
    readFileToString(outFile, result);
    Assert.assertEquals("400; 3", result.toString());
    outFile.delete();
}
/**
 * Pipeline: file scan over int-part1.tbl -> select(2 > field0) -> sink
 * writer. Expects the single surviving value "0" in the output file.
 */
@Test
public void scanSelectWrite() throws Exception {
    JobSpecification jobSpec = new JobSpecification(FRAME_SIZE);

    // Scanner over the single-column integer test file on NC1.
    FileSplit[] splits = new FileSplit[1];
    splits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
            + "simple" + File.separator + "int-part1.tbl");
    IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
    RecordDescriptor scanDesc = new RecordDescriptor(
            new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    IValueParserFactory[] parsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE };
    FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(jobSpec, splitProvider,
            new DelimitedDataTupleParserFactory(parsers, '|'), scanDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, scanner, DEFAULT_NODES);

    // The algebricks micro-op: select(2 > field0) feeding a file writer.
    IScalarEvaluatorFactory cond = new IntegerGreaterThanEvalFactory(new IntegerConstantEvalFactory(2),
            new TupleFieldEvaluatorFactory(0));
    StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 0 },
            BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
    RecordDescriptor selectDesc = scanDesc;

    File outFile = new File(PATH_ACTUAL + SEPARATOR + "scanSelectWrite.out");
    SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
            new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
            selectDesc);

    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(jobSpec, 1, 0,
            new IPushRuntimeFactory[] { select, writer }, new RecordDescriptor[] { selectDesc, null });
    PartitionConstraintHelper.addPartitionCountConstraint(jobSpec, algebricksOp, 1);

    jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), scanner, 0, algebricksOp, 0);
    jobSpec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(jobSpec);

    StringBuilder result = new StringBuilder();
    readFileToString(outFile, result);
    Assert.assertEquals("0", result.toString());
    outFile.delete();
}
/**
 * Source-less micro pipeline inside a single Algebricks meta operator:
 * empty-tuple-source -> assign(constants 400, 3) -> project(field 1) -> sink writer.
 * Only the projected constant (3) should end up in the output file.
 */
@Test
public void etsAssignProjectWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
IntegerConstantEvalFactory const1 = new IntegerConstantEvalFactory(400);
IntegerConstantEvalFactory const2 = new IntegerConstantEvalFactory(3);
// ets emits a single empty tuple to drive the downstream runtimes.
EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
// assign materializes the two constants into fields 0 and 1.
AssignRuntimeFactory assign = new AssignRuntimeFactory(new int[] { 0, 1 },
new IScalarEvaluatorFactory[] { const1, const2 }, new int[] { 0, 1 });
RecordDescriptor assignDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
// project keeps only field 1 (the constant 3).
StreamProjectRuntimeFactory project = new StreamProjectRuntimeFactory(new int[] { 1 });
RecordDescriptor projectDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsAssignProjectWrite.out";
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
projectDesc);
// arity 0/0: the entire pipeline is self-contained in the meta operator.
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, assign, project, writer },
new RecordDescriptor[] { etsDesc, assignDesc, projectDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, DEFAULT_NODES);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("3", buf.toString());
outFile.delete();
}
/**
 * Scans the TPC-H 0.001 customer table, applies a LIMIT of 2 tuples inside an
 * Algebricks meta operator, and writes the first field (customer key) of each
 * surviving tuple. Expected output "12" — presumably customer keys 1 and 2
 * concatenated, since readFileToString appends no separator between lines.
 */
@Test
public void scanLimitWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
+ "tpch0.001" + File.separator + "customer.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
// 8-column customer schema: int, string, string, int, string, float, string, string.
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
new UTF8StringSerializerDeserializer(), FloatSerializerDeserializer.INSTANCE,
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, FloatParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE };
FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(spec, splitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner, DEFAULT_NODES);
// the algebricks op.
// limit = 2 tuples, no offset; keep only field 0 (customer key).
StreamLimitRuntimeFactory limit = new StreamLimitRuntimeFactory(new IntegerConstantEvalFactory(2), null,
new int[] { 0 }, BinaryIntegerInspectorImpl.FACTORY);
RecordDescriptor limitDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanLimitWrite.out";
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
limitDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { limit, writer }, new RecordDescriptor[] { limitDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, algebricksOp, 0);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("12", buf.toString());
outFile.delete();
}
/**
 * Source-less pipeline: empty-tuple-source -> unnest over the constant int
 * array {100, 200, 300} -> sink writer. Expected "100200300": the three
 * values concatenated (readFileToString appends no separator between lines).
 */
@Test
public void etsUnnestWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
// unnest expands the empty tuple into one tuple per array element.
IUnnestingEvaluatorFactory aggregFactory = new IntArrayUnnester(new int[] { 100, 200, 300 });
UnnestRuntimeFactory unnest = new UnnestRuntimeFactory(0, aggregFactory, new int[] { 0 }, false, null);
RecordDescriptor unnestDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsUnnestWrite.out";
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
unnestDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, unnest, writer },
new RecordDescriptor[] { etsDesc, unnestDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("100200300", buf.toString());
outFile.delete();
}
/**
 * Scans customer-part1.tbl and runs a tuple-count aggregate inside an
 * Algebricks meta operator constrained to a single partition, writing the
 * single count tuple to a file. The expected count for that split is 75.
 */
@Test
public void scanAggregateWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
"data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
// 8-column customer schema.
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
new UTF8StringSerializerDeserializer(), FloatSerializerDeserializer.INSTANCE,
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, FloatParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE };
FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(spec, splitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the algebricks op.
// count(*) over the whole input; emits exactly one integer tuple.
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
RecordDescriptor aggDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanAggregateWrite.out";
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
aggDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { agg, writer }, new RecordDescriptor[] { aggDesc, null });
// single partition so the aggregate sees every tuple.
PartitionConstraintHelper.addPartitionCountConstraint(spec, algebricksOp, 1);
spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, algebricksOp, 0);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("75", buf.toString());
outFile.delete();
}
/**
 * End-to-end plan mixing Hyracks operators and an Algebricks meta operator:
 * scan customer.tbl -> in-memory sort by nation key (field 3) -> preclustered
 * group-by counting customers per nation -> select the group with nation key 3
 * -> write its count. Expects "9" customers for that nation.
 */
@Test
public void scanSortGbySelectWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
+ "tpch0.001" + File.separator + "customer.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
// 8-column customer schema; field 3 is the nation key used below.
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
new UTF8StringSerializerDeserializer(), FloatSerializerDeserializer.INSTANCE,
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, FloatParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE };
FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(spec, splitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the sort (by nation id)
RecordDescriptor sortDesc = scannerDesc;
InMemorySortOperatorDescriptor sort = new InMemorySortOperatorDescriptor(spec, new int[] { 3 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
sortDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sort,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the group-by
// nested plan: nts feeds each group's tuples into a count aggregate.
NestedTupleSourceRuntimeFactory nts = new NestedTupleSourceRuntimeFactory();
RecordDescriptor ntsDesc = sortDesc;
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
RecordDescriptor aggDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, agg },
new RecordDescriptor[] { ntsDesc, aggDesc });
NestedPlansAccumulatingAggregatorFactory npaaf = new NestedPlansAccumulatingAggregatorFactory(
new AlgebricksPipeline[] { pipeline }, new int[] { 3 }, new int[] {});
// output: (nation key, count)
RecordDescriptor gbyDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
// preclustered group-by works because the input was just sorted on field 3.
PreclusteredGroupOperatorDescriptor gby = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 3 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, npaaf,
gbyDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, gby,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the algebricks op.
IScalarEvaluatorFactory cond = new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3),
new TupleFieldEvaluatorFactory(0)); // Canadian customers
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 1 },
BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
RecordDescriptor selectDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanSortGbySelectWrite.out";
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
selectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { select, writer }, new RecordDescriptor[] { selectDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, sort, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), sort, 0, gby, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), gby, 0, algebricksOp, 0);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("9", buf.toString());
outFile.delete();
}
/**
 * Source-less pipeline: empty-tuple-source -> unnest {100, 200, 300} ->
 * running tuple-count aggregate (appended as field 1) -> sink writer of
 * field 1 only. Expected "123": the running counts 1, 2, 3 concatenated
 * (readFileToString appends no separator between lines).
 */
@Test
public void etsUnnestRunningaggregateWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
IUnnestingEvaluatorFactory aggregFactory = new IntArrayUnnester(new int[] { 100, 200, 300 });
UnnestRuntimeFactory unnest = new UnnestRuntimeFactory(0, aggregFactory, new int[] { 0 }, false, null);
RecordDescriptor unnestDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
// running aggregate writes the per-tuple count into new field 1.
RunningAggregateRuntimeFactory ragg = new RunningAggregateRuntimeFactory(new int[] { 1 },
new IRunningAggregateEvaluatorFactory[] { new TupleCountRunningAggregateFunctionFactory() },
new int[] { 0, 1 });
RecordDescriptor raggDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsUnnestRunningaggregateWrite.out";
File outFile = new File(filePath);
// only the running count (field 1) is written out.
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 1 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
raggDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, unnest, ragg, writer },
new RecordDescriptor[] { etsDesc, unnestDesc, raggDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("123", buf.toString());
outFile.delete();
}
/**
 * Pipes the assigned constants (400, 3) through an external identity script
 * via the string-streaming runtime, then writes both fields back out.
 * NOTE(review): the test silently returns (is skipped) on any OS that is
 * neither Linux nor Windows — e.g. macOS never exercises this path.
 */
@Test
public void etsAssignScriptWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
IntegerConstantEvalFactory const1 = new IntegerConstantEvalFactory(400);
IntegerConstantEvalFactory const2 = new IntegerConstantEvalFactory(3);
EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
AssignRuntimeFactory assign = new AssignRuntimeFactory(new int[] { 0, 1 },
new IScalarEvaluatorFactory[] { const1, const2 }, new int[] { 0, 1 });
RecordDescriptor assignDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
// parsers for reading the script's space-delimited output back into tuples.
IValueParserFactory[] valueParsers = { IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE };
String osname = System.getProperty("os.name");
String command;
if (osname.equals("Linux")) {
command = "bash target/testscripts/idscript";
} else if (osname.startsWith("Windows")) {
command = "target\\testscripts\\idscript.cmd";
} else {
// don't know how to test
return;
}
// prints tuples to the script's stdin and re-parses its stdout.
StringStreamingRuntimeFactory script = new StringStreamingRuntimeFactory(command,
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE, IntegerPrinterFactory.INSTANCE }, ' ',
new DelimitedDataTupleParserFactory(valueParsers, ' '));
RecordDescriptor scriptDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsAssignScriptWrite.out";
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0, 1 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE, IntegerPrinterFactory.INSTANCE }, outFile,
PrinterBasedWriterFactory.INSTANCE, scriptDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, assign, script, writer },
new RecordDescriptor[] { etsDesc, assignDesc, scriptDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("400; 3", buf.toString());
outFile.delete();
}
/**
 * Scans customer.tbl as whole lines (delimiter '\u0000', one string field per
 * line), replicates the stream to {@code outputArity} line-file writers, and
 * verifies each replica is byte-identical to the input file.
 * NOTE(review): unlike the other tests, the temp output files created here
 * are not deleted at the end.
 */
@Test
public void scanReplicateWrite() throws Exception {
final int outputArity = 2;
JobSpecification spec = new JobSpecification(FRAME_SIZE);
String inputFileName = "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl";
FileSplit[] inputSplits = new FileSplit[] {
new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName) };
// '\u0000' delimiter => each whole line becomes a single string field.
DelimitedDataTupleParserFactory stringParser = new DelimitedDataTupleParserFactory(
new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, '\u0000');
RecordDescriptor stringRec = new RecordDescriptor(
new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), });
FileScanOperatorDescriptor scanOp = new FileScanOperatorDescriptor(spec,
new ConstantFileSplitProvider(inputSplits), stringParser, stringRec);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
ReplicateOperatorDescriptor replicateOp = new ReplicateOperatorDescriptor(spec, stringRec, outputArity);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, replicateOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// one writer (and one fresh temp file) per replicate output branch.
IOperatorDescriptor outputOp[] = new IOperatorDescriptor[outputArity];
File[] outputFile = new File[outputArity];
for (int i = 0; i < outputArity; i++) {
FileSplit fileSplit = createFile(AlgebricksHyracksIntegrationUtil.nc1);
outputFile[i] = fileSplit.getFile(AlgebricksHyracksIntegrationUtil.nc1.getIoManager());
outputOp[i] = new LineFileWriteOperatorDescriptor(spec, new FileSplit[] { fileSplit });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, outputOp[i],
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
}
spec.connect(new OneToOneConnectorDescriptor(spec), scanOp, 0, replicateOp, 0);
for (int i = 0; i < outputArity; i++) {
spec.connect(new OneToOneConnectorDescriptor(spec), replicateOp, i, outputOp[i], 0);
}
for (int i = 0; i < outputArity; i++) {
spec.addRoot(outputOp[i]);
}
AlgebricksHyracksIntegrationUtil.runJob(spec);
// every replica must match the original input file.
for (int i = 0; i < outputArity; i++) {
compareFiles("data" + File.separator + "device0" + File.separator + inputFileName, outputFile[i]
.getAbsolutePath());
}
}
/**
 * Creates a fresh, empty managed temp file on the given node controller and
 * returns a {@link FileSplit} pointing at it. A stale file left over from an
 * earlier run under the same generated name is removed first.
 *
 * @param ncs node controller whose I/O manager resolves the file location
 * @return a managed split for the newly created empty file
 * @throws IOException if the empty file cannot be created
 */
public FileSplit createFile(NodeControllerService ncs) throws IOException {
    final String name = "f" + aInteger.getAndIncrement() + ".tmp";
    final FileReference ref = ncs.getIoManager().getFileReference(0, name);
    final File localFile = ref.getFile();
    FileUtils.deleteQuietly(localFile); // best-effort cleanup of stale leftovers
    localFile.createNewFile();
    return new ManagedFileSplit(ncs.getId(), name);
}
/**
 * Scans int-string-part1.tbl and routes each tuple through a split operator
 * keyed on field 0 into {@code outputArity} line-file writers, then compares
 * each output against the precomputed "-split-N" reference files.
 */
@Test
public void scanSplitWrite() throws Exception {
final int outputArity = 2;
JobSpecification spec = new JobSpecification(FRAME_SIZE);
// index 0 is the scan input; indices 1..2 are the expected split results.
String inputFileName[] = { "data" + File.separator + "simple" + File.separator + "int-string-part1.tbl", "data"
+ File.separator + "simple" + File.separator + "int-string-part1-split-0.tbl",
"data" + File.separator + "simple" + File.separator + "int-string-part1-split-1.tbl" };
File[] inputFiles = new File[inputFileName.length];
for (int i = 0; i < inputFileName.length; i++) {
inputFiles[i] = new File(inputFileName[i]);
}
// fresh temp files to receive the two split streams.
File[] outputFile = new File[outputArity];
FileSplit[] outputFileSplit = new FileSplit[outputArity];
for (int i = 0; i < outputArity; i++) {
outputFileSplit[i] = createFile(AlgebricksHyracksIntegrationUtil.nc1);
outputFile[i] = outputFileSplit[i].getFile(AlgebricksHyracksIntegrationUtil.nc1.getIoManager());
}
FileSplit[] inputSplits = new FileSplit[] {
new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName[0]) };
IFileSplitProvider intSplitProvider = new ConstantFileSplitProvider(inputSplits);
RecordDescriptor scannerDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
new UTF8StringSerializerDeserializer() });
IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE };
FileScanOperatorDescriptor intScanner = new FileScanOperatorDescriptor(spec, intSplitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, intScanner, DEFAULT_NODES);
// split chooses the output branch from the integer value of field 0.
SplitOperatorDescriptor splitOp = new SplitOperatorDescriptor(spec, scannerDesc, outputArity,
new TupleFieldEvaluatorFactory(0), BinaryIntegerInspectorImpl.FACTORY);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, splitOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
IOperatorDescriptor outputOp[] = new IOperatorDescriptor[outputFile.length];
for (int i = 0; i < outputArity; i++) {
outputOp[i] = new LineFileWriteOperatorDescriptor(spec, new FileSplit[] { outputFileSplit[i] });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, outputOp[i],
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
}
spec.connect(new OneToOneConnectorDescriptor(spec), intScanner, 0, splitOp, 0);
for (int i = 0; i < outputArity; i++) {
spec.connect(new OneToOneConnectorDescriptor(spec), splitOp, i, outputOp[i], 0);
}
for (int i = 0; i < outputArity; i++) {
spec.addRoot(outputOp[i]);
}
AlgebricksHyracksIntegrationUtil.runJob(spec);
// output i must equal the precomputed "-split-i" reference file.
for (int i = 0; i < outputArity; i++) {
compareFiles("data" + File.separator + "device0" + File.separator + inputFileName[i + 1], outputFile[i]
.getAbsolutePath());
}
}
/**
 * Scans nation.tbl and sorts it with the micro (in-pipeline) in-memory sort
 * runtime on the name column (field 1), writing all four columns. The result
 * is compared against a checked-in expected file rather than an in-line string.
 */
@Test
public void scanMicroSortWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
+ "tpch0.001" + File.separator + "nation.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
// nation schema: int key, string name, int region key, string comment.
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() });
IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, IntegerParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE };
FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(spec, splitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the algebricks op.
// in-pipeline sort on the UTF-8 name column (field 1).
InMemorySortRuntimeFactory sort = new InMemorySortRuntimeFactory(new int[] { 1 }, null,
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
null);
RecordDescriptor sortDesc = scannerDesc;
String fileName = "scanMicroSortWrite.out";
String filePath = PATH_ACTUAL + SEPARATOR + fileName;
String resultFilePath = PATH_EXPECTED + SEPARATOR + fileName;
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0, 1, 2, 3 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE, UTF8StringPrinterFactory.INSTANCE,
IntegerPrinterFactory.INSTANCE, UTF8StringPrinterFactory.INSTANCE },
outFile, PrinterBasedWriterFactory.INSTANCE, sortDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { sort, writer }, new RecordDescriptor[] { sortDesc, null });
// single partition so the in-memory sort sees the whole table.
PartitionConstraintHelper.addPartitionCountConstraint(spec, algebricksOp, 1);
spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, algebricksOp, 0);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
compareFiles(filePath, resultFilePath);
outFile.delete();
}
/**
 * Exercises the subplan runtime: assign 400 into field 0, run a nested plan
 * (nts -> assign field1 = field0 + 3 -> project field 1) as a subplan, then
 * project and write the subplan's result. Expected output: 400 + 3 = "403".
 */
@Test
public void etsAssignSubplanProjectWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
IntegerConstantEvalFactory const1 = new IntegerConstantEvalFactory(400);
IntegerConstantEvalFactory const2 = new IntegerConstantEvalFactory(3);
EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
// outer assign: field 0 := 400.
AssignRuntimeFactory assign1 = new AssignRuntimeFactory(new int[] { 0 },
new IScalarEvaluatorFactory[] { const1 }, new int[] { 0 });
RecordDescriptor assign1Desc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
// nested plan fed by the subplan's tuple source.
NestedTupleSourceRuntimeFactory nts = new NestedTupleSourceRuntimeFactory();
// inner assign: field 1 := field 0 + 3.
AssignRuntimeFactory assign2 = new AssignRuntimeFactory(new int[] { 1 },
new IScalarEvaluatorFactory[] { new IntegerAddEvalFactory(new TupleFieldEvaluatorFactory(0), const2) },
new int[] { 0, 1 });
RecordDescriptor assign2Desc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
StreamProjectRuntimeFactory project1 = new StreamProjectRuntimeFactory(new int[] { 1 });
RecordDescriptor project1Desc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, assign2, project1 },
new RecordDescriptor[] { assign1Desc, assign2Desc, project1Desc });
SubplanRuntimeFactory subplan = new SubplanRuntimeFactory(pipeline,
new IMissingWriterFactory[] { NoopMissingWriterFactory.INSTANCE }, assign1Desc, null);
RecordDescriptor subplanDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
// keep only the computed sum (field 1 of the subplan output).
StreamProjectRuntimeFactory project2 = new StreamProjectRuntimeFactory(new int[] { 1 });
RecordDescriptor project2Desc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsAssignSubplanProjectWrite.out";
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
project2Desc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, assign1, subplan, project2, writer },
new RecordDescriptor[] { etsDesc, assign1Desc, subplanDesc, project2Desc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, DEFAULT_NODES);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("403", buf.toString());
outFile.delete();
}
/**
 * Micro-runtime variant of {@code scanSortGbySelectWrite}: the sort, the
 * preclustered group-by (count per nation key), and the select of nation
 * key 3 all run inside ONE Algebricks meta operator instead of separate
 * Hyracks operators. Expects the same count, "9".
 */
@Test
public void scanMicroSortGbySelectWrite() throws Exception {
JobSpecification spec = new JobSpecification(FRAME_SIZE);
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
"data" + File.separator + "tpch0.001" + File.separator + "customer.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
// 8-column customer schema; field 3 is the nation key used below.
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
new UTF8StringSerializerDeserializer(), FloatSerializerDeserializer.INSTANCE,
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, FloatParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE };
FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(spec, splitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the sort (by nation id)
RecordDescriptor sortDesc = scannerDesc;
InMemorySortRuntimeFactory sort = new InMemorySortRuntimeFactory(new int[] { 3 }, null,
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, null);
// the group-by
// nested plan: nts feeds each group's tuples into a count aggregate.
NestedTupleSourceRuntimeFactory nts = new NestedTupleSourceRuntimeFactory();
RecordDescriptor ntsDesc = sortDesc;
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
RecordDescriptor aggDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, agg },
new RecordDescriptor[] { ntsDesc, aggDesc });
NestedPlansAccumulatingAggregatorFactory npaaf = new NestedPlansAccumulatingAggregatorFactory(
new AlgebricksPipeline[] { pipeline }, new int[] { 3 }, new int[] {});
// output: (nation key, count)
RecordDescriptor gbyDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
MicroPreClusteredGroupRuntimeFactory gby = new MicroPreClusteredGroupRuntimeFactory(new int[] { 3 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, npaaf,
sortDesc, gbyDesc, null);
// the algebricks op.
IScalarEvaluatorFactory cond = new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3),
new TupleFieldEvaluatorFactory(0)); // Canadian customers
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 1 },
BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
RecordDescriptor selectDesc = new RecordDescriptor(
new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
// FIX: was "scanSortGbySelectWrite.out", copy-pasted from the other test,
// which made the two tests collide on the same output file.
String filePath = PATH_ACTUAL + SEPARATOR + "scanMicroSortGbySelectWrite.out";
File outFile = new File(filePath);
SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
selectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { sort, gby, select, writer },
new RecordDescriptor[] { sortDesc, gbyDesc, selectDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, algebricksOp, 0);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
StringBuilder buf = new StringBuilder();
readFileToString(outFile, buf);
Assert.assertEquals("9", buf.toString());
outFile.delete();
}
/**
 * Reads the entire content of {@code file} into {@code buf}, joining lines
 * with a single '\n' and writing no trailing newline.
 *
 * @param file the file to read
 * @param buf  accumulator the file content is appended to
 * @throws Exception if the file cannot be opened or read
 */
private static void readFileToString(File file, StringBuilder buf) throws Exception {
    BufferedReader result = new BufferedReader(new FileReader(file));
    try {
        boolean first = true;
        String s;
        while ((s = result.readLine()) != null) {
            // Emit a separator before every line except the first. In the
            // original code "first = false" was nested inside "if (!first)"
            // and therefore unreachable, so no separator was ever written.
            if (!first) {
                buf.append('\n');
            }
            first = false;
            buf.append(s);
        }
    } finally {
        // Close the reader even when readLine() throws.
        result.close();
    }
}
/**
 * Asserts that two files have identical content, comparing them line by line.
 * Fails if any corresponding lines differ or if {@code fileNameB} has extra
 * trailing lines.
 *
 * @param fileNameA path of the expected file
 * @param fileNameB path of the actual file
 * @throws IOException if either file cannot be opened or read
 */
public void compareFiles(String fileNameA, String fileNameB) throws IOException {
    BufferedReader fileA = new BufferedReader(new FileReader(fileNameA));
    try {
        BufferedReader fileB = new BufferedReader(new FileReader(fileNameB));
        try {
            String lineA;
            while ((lineA = fileA.readLine()) != null) {
                // Each line of A must match the corresponding line of B.
                Assert.assertEquals(lineA, fileB.readLine());
            }
            // B must be exhausted as well, otherwise it has extra lines.
            Assert.assertNull(fileB.readLine());
        } finally {
            fileB.close();
        }
    } finally {
        // The original leaked both readers on every call; always close them.
        fileA.close();
    }
}
}
| |
package com.meteotester.util;
import java.io.File;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import com.jayway.jsonpath.JsonPath;
import com.meteotester.config.Config;
import com.meteotester.entities.Forecast;
import com.meteotester.entities.Observation;
import com.meteotester.entities.Place;
import com.meteotester.entities.Source;
import com.meteotester.entities.WeatherSummary;
/**
 * Parses weather-provider JSON (downloaded from per-source URL templates)
 * into either an in-memory {@link WeatherSummary} for the web UI or TSV rows
 * for persistent storage.
 * <p>
 * URL templates may contain the placeholders ${COORDS}, ${PLACE}, ${COUNTRY}
 * and ${ID}, which are substituted from the given {@link Place}.
 */
public class Parser {
    private static Logger log = Logger.getLogger(Parser.class);

    /** Number of forecast days rendered on the web UI. */
    static int N = Config.NUM_DAYS_WEB;
    /** Number of forecast days persisted to the store. */
    static int NS = Config.NUM_DAYS_STORE;

    /**
     * Expands the ${COORDS}/${PLACE}/${COUNTRY}/${ID} placeholders in the
     * source's URL template with the attributes of {@code place}, logging the
     * resulting URL at debug level. Extracted from parse2web/parse2store,
     * which previously duplicated this code.
     */
    private static String buildUrl(Source source, Place place) {
        String sURL = source.getUrl();
        sURL = sURL.replaceFirst(Pattern.quote("${COORDS}"), place.getCoords());
        sURL = sURL.replaceFirst(Pattern.quote("${PLACE}"), place.getName());
        sURL = sURL.replaceFirst(Pattern.quote("${COUNTRY}"), place.getCountry());
        sURL = sURL.replaceFirst(Pattern.quote("${ID}"), String.valueOf(place.getId()));
        log.debug(sURL);
        return sURL;
    }

    /**
     * Downloads the forecast JSON for {@code place} from {@code source} and
     * converts it into a {@link WeatherSummary} covering {@link #N} days.
     *
     * @throws Exception if the download or parsing fails
     */
    public static WeatherSummary parse2web(Source source, Place place) throws Exception {
        String sURL = buildUrl(source, place);
        //String json = Util.getContentFromMockFile(source);
        String json = Util.getContentFromURL(sURL);
        WeatherSummary summary = jsonForecasts2summary(json, source, place);
        return summary;
    }

    /**
     * Downloads forecast or observation JSON for {@code place}, archives the
     * raw JSON and its TSV conversion locally and to S3, and returns a short
     * status line. Skips all work if this (type, source, place) combination
     * has already been processed (see {@code Util.isProcessed}).
     */
    public static String parse2store(Source source, Place place) {
        String type = source.getType();
        String result = source.getName() + " " + type + " " + place.getName() + " [OK]\n";
        if (!Util.isProcessed(type, source, place)) {
            String sURL = buildUrl(source, place);
            //String json = Util.getContentFromMockFile(source);
            String json = Util.getContentFromURL(sURL);
            File file = Util.saveToFile(json, type + "_json", source, place);
            S3Util.saveFileToS3(file);
            // "forecasts" sources produce per-day forecast rows; every other
            // type is treated as observations.
            String tsv;
            if (type.equals("forecasts"))
                tsv = jsonForecasts2tsv(json, source, place);
            else
                tsv = jsonObservations2tsv(json, source, place);
            file = Util.saveToFile(tsv, type, source, place);
            S3Util.saveFileToS3(file);
        }
        return result;
    }

    /**
     * Builds a {@link WeatherSummary} (daily icon, min/max temperature and
     * precipitation) from forecast JSON for the next {@link #N} days.
     * Variables missing from the JSON default to 0.
     */
    private static WeatherSummary jsonForecasts2summary(String json, Source source, Place place) {
        String[] icon = new String[N];
        double[] mintemp = new double[N];
        double[] maxtemp = new double[N];
        double[] qpf = new double[N];
        double[] values = mintemp;
        Object value = 0;
        // Each jpVariables entry is "name,units,jsonpath" (split on ',').
        String[] jpVariables = source.getJpVariables();
        Date today = parseDateInLocalTimeFromJson(json, source, 0);
        String iconpath0 = source.getJpIcon();
        try {
            for (int i = 0; i < N; i++) {
                // The configured icon path addresses day 0; rewrite the index for day i.
                String iconpath = iconpath0.replaceFirst("\\[0\\]", "[" + i + "]");
                icon[i] = JsonPath.read(json, iconpath);
                for (int j = 0; j < jpVariables.length; j++) {
                    String[] variable = jpVariables[j].split(",");
                    if (variable[0].equals("qpf") || variable[0].equals("mintemp") || variable[0].equals("maxtemp")) {
                        // Point "values" at the target array for this variable.
                        if (variable[0].equals("qpf")) {
                            values = qpf;
                        } else if (variable[0].equals("mintemp")) {
                            values = mintemp;
                        } else if (variable[0].equals("maxtemp")) {
                            values = maxtemp;
                        }
                        // Variable paths address day 1; rewrite the index for day i.
                        variable[2] = variable[2].replace("[1]", "[" + i + "]");
                        try {
                            value = JsonPath.read(json, variable[2]);
                        } catch (Exception e) {
                            // This variable is optional in some feeds (e.g. qpf in openweather).
                            value = 0;
                        }
                        values[i] = Double.valueOf(value.toString());
                        // forecastio qpf appears to be an hourly rate; scale to
                        // a daily total (x24) — TODO confirm against the feed docs.
                        if (variable[0].equals("qpf") && source.getName().equals("forecastio"))
                            values[i] *= 24;
                    }
                }
            }
        } catch (Exception e) {
            log.error("Failed to parse forecast JSON from " + source.getName(), e);
        }
        WeatherSummary summary = new WeatherSummary(mintemp, maxtemp, qpf);
        summary.setToday(today);
        summary.setSourceName(source.getDomain());
        summary.setIcon(icon);
        return summary;
    }

    /**
     * Converts forecast JSON into TSV rows: one row per (day, variable) pair
     * for days 1..{@link #NS}. Missing optional variables are stored as 0.
     */
    private static String jsonForecasts2tsv(String json, Source source, Place place) {
        // StringBuilder instead of repeated String concatenation in the loop.
        StringBuilder result = new StringBuilder();
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd");
        String name = source.getName();
        String jpLat = source.getJpLat();
        String jpLong = source.getJpLong();
        String[] jpVariables = source.getJpVariables();
        Object latitude = (jpLat == null) ? null : JsonPath.read(json, jpLat);
        Object longitude = (jpLong == null) ? null : JsonPath.read(json, jpLong);
        Forecast forecast = new Forecast(name, latitude, longitude, place.getName(), place.getCountry());
        try {
            for (int i = 1; i <= NS; i++) {
                Date date = parseDateInLocalTimeFromJson(json, source, i);
                forecast.setTargetdate(sdf.format(date));
                forecast.setDaysbefore(i);
                for (int j = 0; j < jpVariables.length; j++) {
                    String[] variable = jpVariables[j].split(",");
                    forecast.setVariable(variable[0]);
                    forecast.setUnits(variable[1]);
                    variable[2] = variable[2].replace("[1]", "[" + i + "]");
                    try {
                        forecast.setValue(JsonPath.read(json, variable[2]));
                    } catch (Exception e) { // this variable is optional in json
                        forecast.setValue(0);
                    }
                    // Forecast.toString() is expected to render one TSV row.
                    result.append(forecast);
                }
            }
        } catch (Exception e) {
            log.error("Failed to convert forecast JSON from " + name + " to TSV", e);
        }
        return result.toString();
    }

    /**
     * Converts observation JSON into TSV rows: one row per variable for the
     * observation date (year/month/day) found in the JSON.
     */
    private static String jsonObservations2tsv(String json, Source source, Place place) {
        StringBuilder result = new StringBuilder();
        String name = source.getName();
        String jpLat = source.getJpLat();
        String jpLong = source.getJpLong();
        String[] jpVariables = source.getJpVariables();
        Object latitude = (jpLat == null) ? null : JsonPath.read(json, jpLat);
        Object longitude = (jpLong == null) ? null : JsonPath.read(json, jpLong);
        Observation observation = new Observation(name, latitude, longitude, place.getName(), place.getCountry());
        String year = JsonPath.read(json, source.getJpYear());
        String month = JsonPath.read(json, source.getJpMon());
        String day = JsonPath.read(json, source.getJpDay());
        observation.setDate(year + "/" + month + "/" + day);
        for (int j = 0; j < jpVariables.length; j++) {
            String[] variable = jpVariables[j].split(",");
            observation.setVariable(variable[0]);
            observation.setUnits(variable[1]);
            observation.setValue(JsonPath.read(json, variable[2]));
            result.append(observation);
        }
        return result.toString();
    }

    /**
     * Extracts the local date of forecast day {@code daysafter} (0 = today,
     * 1 = tomorrow, ...) from the provider JSON. Most providers expose a
     * per-day unix epoch; wunderground, forecastio and worldweatheronl need
     * provider-specific handling because their timestamps are not local time.
     *
     * @return the parsed date, or {@code null} if no timestamp could be read
     */
    private static Date parseDateInLocalTimeFromJson(String json, Source source, int daysafter) {
        Date date = null;
        long unixtime = 0;
        String jpUnixtime = source.getJpUnixtime();
        if (jpUnixtime != null) {
            Object epoch = JsonPath.read(json, jpUnixtime.replace("[1]", "[" + daysafter + "]"));
            unixtime = Util.epoch2unixtime(epoch);
            date = new Date(unixtime * 1000);
        }
        if (source.getName().equals("wunderground")) { // epoch in wunderground is UTC, not local
            Integer day = JsonPath.read(json, "$.forecast.simpleforecast.forecastday[" + daysafter + "].date.day");
            Integer month = JsonPath.read(json, "$.forecast.simpleforecast.forecastday[" + daysafter + "].date.month");
            Integer year = JsonPath.read(json, "$.forecast.simpleforecast.forecastday[" + daysafter + "].date.year");
            String strDate = String.format("%02d", day) + "-" + String.format("%02d", month) + "-" + year;
            DateFormat df = new SimpleDateFormat("dd-MM-yyyy");
            try {
                date = df.parse(strDate);
            } catch (ParseException e) {
                log.error("Failed to parse wunderground date " + strDate, e);
            }
        } else if (source.getName().equals("forecastio")) { // unixtime in forecastio is UTC, not local
            Integer offset = JsonPath.read(json, "$.offset");
            unixtime += offset * 3600;
            date = new Date(unixtime * 1000);
        } else if (source.getName().equals("worldweatheronl")) { // date in worldweatheronl is yyyy-MM-dd
            DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
            try {
                date = df.parse((String) JsonPath.read(json, "$.data.weather[" + daysafter + "].date"));
            } catch (ParseException e) {
                log.error("Failed to parse worldweatheronl date", e);
            }
        }
        return date;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.rest.service;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.HBaseConnection;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.rest.util.Serializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.acls.domain.AccessControlEntryImpl;
import org.springframework.security.acls.domain.AclAuthorizationStrategy;
import org.springframework.security.acls.domain.AclImpl;
import org.springframework.security.acls.domain.AuditLogger;
import org.springframework.security.acls.domain.GrantedAuthoritySid;
import org.springframework.security.acls.domain.ObjectIdentityImpl;
import org.springframework.security.acls.domain.PermissionFactory;
import org.springframework.security.acls.domain.PrincipalSid;
import org.springframework.security.acls.model.AccessControlEntry;
import org.springframework.security.acls.model.Acl;
import org.springframework.security.acls.model.AlreadyExistsException;
import org.springframework.security.acls.model.ChildrenExistException;
import org.springframework.security.acls.model.MutableAcl;
import org.springframework.security.acls.model.MutableAclService;
import org.springframework.security.acls.model.NotFoundException;
import org.springframework.security.acls.model.ObjectIdentity;
import org.springframework.security.acls.model.PermissionGrantingStrategy;
import org.springframework.security.acls.model.Sid;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.util.FieldUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
/**
 * HBase-backed implementation of Spring Security's {@link MutableAclService}.
 * <p>
 * Each ACL is persisted as a single row in the "&lt;metadata-table&gt;_acl"
 * table, keyed by the identifier of its {@link ObjectIdentity}. Column family
 * "i" stores the ACL metadata (object type, owner sid, parent reference and
 * the entries-inheriting flag); column family "a" stores one serialized ACE
 * per column, qualified by the sid name.
 *
 * @author xduo
 */
@Component("aclService")
public class AclService implements MutableAclService {
    private static final Logger logger = LoggerFactory.getLogger(AclService.class);

    public static final String ACL_INFO_FAMILY = "i";
    public static final String ACL_ACES_FAMILY = "a";
    private static final String DEFAULT_TABLE_PREFIX = "kylin_metadata";
    private static final String ACL_TABLE_NAME = "_acl";
    private static final String ACL_INFO_FAMILY_TYPE_COLUMN = "t";
    private static final String ACL_INFO_FAMILY_OWNER_COLUMN = "o";
    private static final String ACL_INFO_FAMILY_PARENT_COLUMN = "p";
    private static final String ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN = "i";

    // (De)serializers for the value objects stored in HBase cells.
    private Serializer<SidInfo> sidSerializer = new Serializer<SidInfo>(SidInfo.class);
    private Serializer<DomainObjectInfo> domainObjSerializer = new Serializer<DomainObjectInfo>(DomainObjectInfo.class);
    private Serializer<AceInfo> aceSerializer = new Serializer<AceInfo>(AceInfo.class);

    private String hbaseUrl = null;
    private String tableNameBase = null;
    private String aclTableName = null;

    // AclImpl offers no public mutator for its ACE list, so it is populated
    // reflectively (see setAces()). fieldAcl is made accessible alongside it;
    // NOTE(review): fieldAcl is not referenced in this class — confirm whether
    // it is still needed before removing.
    private final Field fieldAces = FieldUtils.getField(AclImpl.class, "aces");
    private final Field fieldAcl = FieldUtils.getField(AccessControlEntryImpl.class, "acl");

    @Autowired
    protected PermissionGrantingStrategy permissionGrantingStrategy;

    @Autowired
    protected PermissionFactory aclPermissionFactory;

    @Autowired
    protected AclAuthorizationStrategy aclAuthorizationStrategy;

    @Autowired
    protected AuditLogger auditLogger;

    /**
     * Derives the ACL table name from the Kylin metadata URL (format
     * "TABLE@HBASE_URL") and creates the HBase table if it does not exist.
     *
     * @throws IOException if the table cannot be created or checked
     */
    public AclService() throws IOException {
        String metadataUrl = KylinConfig.getInstanceFromEnv().getMetadataUrl();
        // split TABLE@HBASE_URL
        int cut = metadataUrl.indexOf('@');
        tableNameBase = cut < 0 ? DEFAULT_TABLE_PREFIX : metadataUrl.substring(0, cut);
        hbaseUrl = cut < 0 ? metadataUrl : metadataUrl.substring(cut + 1);
        aclTableName = tableNameBase + ACL_TABLE_NAME;
        fieldAces.setAccessible(true);
        fieldAcl.setAccessible(true);
        HBaseConnection.createHTableIfNeeded(hbaseUrl, aclTableName, ACL_INFO_FAMILY, ACL_ACES_FAMILY);
    }

    /**
     * Scans the ACL table for rows whose parent column equals
     * {@code parentIdentity} and returns their object identities.
     */
    @Override
    public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
        List<ObjectIdentity> oids = new ArrayList<ObjectIdentity>();
        HTableInterface htable = null;
        try {
            htable = HBaseConnection.get(hbaseUrl).getTable(aclTableName);
            Scan scan = new Scan();
            // Match only rows whose serialized parent equals the given identity;
            // rows with no parent column are skipped entirely.
            SingleColumnValueFilter parentFilter = new SingleColumnValueFilter(Bytes.toBytes(ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), CompareOp.EQUAL, domainObjSerializer.serialize(new DomainObjectInfo(parentIdentity)));
            parentFilter.setFilterIfMissing(true);
            scan.setFilter(parentFilter);
            ResultScanner scanner = htable.getScanner(scan);
            try {
                for (Result result = scanner.next(); result != null; result = scanner.next()) {
                    String id = Bytes.toString(result.getRow());
                    String type = Bytes.toString(result.getValue(Bytes.toBytes(ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN)));
                    oids.add(new ObjectIdentityImpl(type, id));
                }
            } finally {
                // Scanners hold server-side resources and must always be
                // closed; the original code leaked the scanner.
                scanner.close();
            }
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(htable);
        }
        return oids;
    }

    /**
     * Reads a single ACL. Unlike {@link #readAclById(ObjectIdentity, List)},
     * a missing entry is tolerated here and {@code null} is returned instead
     * of asserting presence.
     */
    @Override
    public Acl readAclById(ObjectIdentity object) throws NotFoundException {
        Map<ObjectIdentity, Acl> aclsMap = readAclsById(Arrays.asList(object), null);
        return aclsMap.get(object);
    }

    /**
     * Reads a single ACL restricted to the given sids, asserting that an
     * entry for {@code object} exists.
     */
    @Override
    public Acl readAclById(ObjectIdentity object, List<Sid> sids) throws NotFoundException {
        Map<ObjectIdentity, Acl> aclsMap = readAclsById(Arrays.asList(object), sids);
        Assert.isTrue(aclsMap.containsKey(object), "There should have been an Acl entry for ObjectIdentity " + object);
        return aclsMap.get(object);
    }

    /** Reads the ACLs for all given identities without a sid filter. */
    @Override
    public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> objects) throws NotFoundException {
        return readAclsById(objects, null);
    }

    /**
     * Reads one row per object identity, rebuilding each {@link AclImpl} with
     * its owner, parent chain (loaded recursively) and ACEs. When
     * {@code sids} is non-null only those sids' ACEs are attached.
     *
     * @throws NotFoundException if any identity has no row in the ACL table
     */
    @Override
    public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> oids, List<Sid> sids) throws NotFoundException {
        Map<ObjectIdentity, Acl> aclMaps = new HashMap<ObjectIdentity, Acl>();
        HTableInterface htable = null;
        Result result = null;
        try {
            htable = HBaseConnection.get(hbaseUrl).getTable(aclTableName);
            for (ObjectIdentity oid : oids) {
                result = htable.get(new Get(Bytes.toBytes(String.valueOf(oid.getIdentifier()))));
                if (null != result && !result.isEmpty()) {
                    SidInfo owner = sidSerializer.deserialize(result.getValue(Bytes.toBytes(ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN)));
                    Sid ownerSid = (null == owner) ? null : (owner.isPrincipal() ? new PrincipalSid(owner.getSid()) : new GrantedAuthoritySid(owner.getSid()));
                    boolean entriesInheriting = Bytes.toBoolean(result.getValue(Bytes.toBytes(ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN)));
                    // Resolve the parent ACL recursively when a parent column exists.
                    Acl parentAcl = null;
                    DomainObjectInfo parentInfo = domainObjSerializer.deserialize(result.getValue(Bytes.toBytes(ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN)));
                    if (null != parentInfo) {
                        ObjectIdentity parentObj = new ObjectIdentityImpl(parentInfo.getType(), parentInfo.getId());
                        parentAcl = readAclById(parentObj, null);
                    }
                    AclImpl acl = new AclImpl(oid, oid.getIdentifier(), aclAuthorizationStrategy, permissionGrantingStrategy, parentAcl, null, entriesInheriting, ownerSid);
                    genAces(sids, result, acl);
                    aclMaps.put(oid, acl);
                } else {
                    throw new NotFoundException("Unable to find ACL information for object identity '" + oid + "'");
                }
            }
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(htable);
        }
        return aclMaps;
    }

    /**
     * Creates an empty ACL for {@code objectIdentity}, owned by the currently
     * authenticated principal, with entries-inheriting enabled.
     *
     * @throws AlreadyExistsException if an ACL for the identity already exists
     */
    @Override
    public MutableAcl createAcl(ObjectIdentity objectIdentity) throws AlreadyExistsException {
        Acl acl = null;
        try {
            acl = readAclById(objectIdentity);
        } catch (NotFoundException e) {
            // Expected: absence of an existing ACL is the normal case here.
        }
        if (null != acl) {
            throw new AlreadyExistsException("ACL of " + objectIdentity + " exists!");
        }
        Authentication auth = SecurityContextHolder.getContext().getAuthentication();
        PrincipalSid sid = new PrincipalSid(auth);
        HTableInterface htable = null;
        try {
            htable = HBaseConnection.get(hbaseUrl).getTable(aclTableName);
            Put put = new Put(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
            put.add(Bytes.toBytes(ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
            put.add(Bytes.toBytes(ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
            put.add(Bytes.toBytes(ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
            htable.put(put);
            htable.flushCommits();
            logger.debug("ACL of " + objectIdentity + " created successfully.");
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(htable);
        }
        return (MutableAcl) readAclById(objectIdentity);
    }

    /**
     * Deletes the ACL row of {@code objectIdentity}, recursively deleting the
     * children's ACLs when {@code deleteChildren} is set.
     *
     * @throws ChildrenExistException if children exist and
     *         {@code deleteChildren} is false
     */
    @Override
    public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) throws ChildrenExistException {
        HTableInterface htable = null;
        try {
            htable = HBaseConnection.get(hbaseUrl).getTable(aclTableName);
            Delete delete = new Delete(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
            List<ObjectIdentity> children = findChildren(objectIdentity);
            if (!deleteChildren && children.size() > 0) {
                throw new ChildrenExistException("Children exists for " + objectIdentity);
            }
            for (ObjectIdentity oid : children) {
                deleteAcl(oid, deleteChildren);
            }
            htable.delete(delete);
            htable.flushCommits();
            logger.debug("ACL of " + objectIdentity + " deleted successfully.");
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(htable);
        }
    }

    /**
     * Replaces the persisted ACE family and parent reference of an existing
     * ACL with the state of {@code acl}, then reads it back.
     *
     * @throws NotFoundException if no ACL exists for the identity
     */
    @Override
    public MutableAcl updateAcl(MutableAcl acl) throws NotFoundException {
        // Ensure the ACL exists; readAclById propagates NotFoundException.
        // (The original wrapped this in a catch-and-rethrow that added nothing.)
        readAclById(acl.getObjectIdentity());
        HTableInterface htable = null;
        try {
            htable = HBaseConnection.get(hbaseUrl).getTable(aclTableName);
            // Wipe the existing ACE family before re-writing it, so removed
            // entries do not linger.
            Delete delete = new Delete(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
            delete.deleteFamily(Bytes.toBytes(ACL_ACES_FAMILY));
            htable.delete(delete);
            Put put = new Put(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
            if (null != acl.getParentAcl()) {
                put.add(Bytes.toBytes(ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
            }
            for (AccessControlEntry ace : acl.getEntries()) {
                AceInfo aceInfo = new AceInfo(ace);
                put.add(Bytes.toBytes(ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
            }
            if (!put.isEmpty()) {
                htable.put(put);
                htable.flushCommits();
                logger.debug("ACL of " + acl.getObjectIdentity() + " updated successfully.");
            }
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(htable);
        }
        return (MutableAcl) readAclById(acl.getObjectIdentity());
    }

    /**
     * Deserializes the ACEs of {@code result} (restricted to {@code sids}
     * when non-null) and injects them into {@code acl}.
     */
    private void genAces(List<Sid> sids, Result result, AclImpl acl) throws JsonParseException, JsonMappingException, IOException {
        List<AceInfo> aceInfos = new ArrayList<AceInfo>();
        if (null != sids) {
            // Just return aces in sids
            for (Sid sid : sids) {
                String sidName = null;
                if (sid instanceof PrincipalSid) {
                    sidName = ((PrincipalSid) sid).getPrincipal();
                } else if (sid instanceof GrantedAuthoritySid) {
                    sidName = ((GrantedAuthoritySid) sid).getGrantedAuthority();
                }
                // NOTE(review): a Sid of any other type leaves sidName null,
                // which would NPE in Bytes.toBytes — confirm only these two
                // Sid implementations can reach this point.
                AceInfo aceInfo = aceSerializer.deserialize(result.getValue(Bytes.toBytes(ACL_ACES_FAMILY), Bytes.toBytes(sidName)));
                if (null != aceInfo) {
                    aceInfos.add(aceInfo);
                }
            }
        } else {
            // No filter: take every ACE column in the family.
            NavigableMap<byte[], byte[]> familyMap = result.getFamilyMap(Bytes.toBytes(ACL_ACES_FAMILY));
            for (byte[] qualifier : familyMap.keySet()) {
                AceInfo aceInfo = aceSerializer.deserialize(familyMap.get(qualifier));
                if (null != aceInfo) {
                    aceInfos.add(aceInfo);
                }
            }
        }
        List<AccessControlEntry> newAces = new ArrayList<AccessControlEntry>();
        for (int i = 0; i < aceInfos.size(); i++) {
            AceInfo aceInfo = aceInfos.get(i);
            if (null != aceInfo) {
                Sid sid = aceInfo.getSidInfo().isPrincipal() ? new PrincipalSid(aceInfo.getSidInfo().getSid()) : new GrantedAuthoritySid(aceInfo.getSidInfo().getSid());
                AccessControlEntry ace = new AccessControlEntryImpl(Long.valueOf(i), acl, sid, aclPermissionFactory.buildFromMask(aceInfo.getPermissionMask()), true, false, false);
                newAces.add(ace);
            }
        }
        this.setAces(acl, newAces);
    }

    /** Sets the private "aces" field of AclImpl reflectively (no public setter). */
    private void setAces(AclImpl acl, List<AccessControlEntry> aces) {
        try {
            fieldAces.set(acl, aces);
        } catch (IllegalAccessException e) {
            throw new IllegalStateException("Could not set AclImpl entries", e);
        }
    }

    /** Serializable snapshot of an ObjectIdentity (type + id) for HBase storage. */
    protected static class DomainObjectInfo {
        private String id;
        private String type;

        public DomainObjectInfo() {
        }

        public DomainObjectInfo(ObjectIdentity oid) {
            super();
            this.id = (String) oid.getIdentifier();
            this.type = oid.getType();
        }

        public Serializable getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getType() {
            return type;
        }

        public void setType(String type) {
            this.type = type;
        }
    }

    /** Serializable snapshot of a Sid (name + principal-vs-authority flag). */
    protected static class SidInfo {
        private String sid;
        private boolean isPrincipal;

        public SidInfo() {
        }

        public SidInfo(Sid sid) {
            if (sid instanceof PrincipalSid) {
                this.sid = ((PrincipalSid) sid).getPrincipal();
                this.isPrincipal = true;
            } else if (sid instanceof GrantedAuthoritySid) {
                this.sid = ((GrantedAuthoritySid) sid).getGrantedAuthority();
                this.isPrincipal = false;
            }
        }

        public String getSid() {
            return sid;
        }

        public void setSid(String sid) {
            this.sid = sid;
        }

        public boolean isPrincipal() {
            return isPrincipal;
        }

        public void setPrincipal(boolean isPrincipal) {
            this.isPrincipal = isPrincipal;
        }
    }

    /** Serializable snapshot of an ACE (sid + permission mask). */
    protected static class AceInfo {
        private SidInfo sidInfo;
        private int permissionMask;

        public AceInfo() {
        }

        public AceInfo(AccessControlEntry ace) {
            super();
            this.sidInfo = new SidInfo(ace.getSid());
            this.permissionMask = ace.getPermission().getMask();
        }

        public SidInfo getSidInfo() {
            return sidInfo;
        }

        public void setSidInfo(SidInfo sidInfo) {
            this.sidInfo = sidInfo;
        }

        public int getPermissionMask() {
            return permissionMask;
        }

        public void setPermissionMask(int permissionMask) {
            this.permissionMask = permissionMask;
        }
    }
}
| |
/*
* Copyright 2018 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dialogflow.v2;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.api.resourcenames.ResourceNameType;
import java.util.Map;
import java.util.ArrayList;
import java.util.List;
// AUTO-GENERATED DOCUMENTATION AND CLASS
@javax.annotation.Generated("by GAPIC protoc plugin")
public class SessionEntityTypeName implements ResourceName {
  private static final PathTemplate PATH_TEMPLATE =
      PathTemplate.createWithoutUrlEncoding(
          "projects/{project}/agent/sessions/{session}/entityTypes/{entity_type}");

  /** Lazily-built cache for getFieldValuesMap(), guarded by double-checked locking. */
  private volatile Map<String, String> fieldValuesMap;

  private final String project;
  private final String session;
  private final String entityType;

  private SessionEntityTypeName(Builder builder) {
    this.project = Preconditions.checkNotNull(builder.getProject());
    this.session = Preconditions.checkNotNull(builder.getSession());
    this.entityType = Preconditions.checkNotNull(builder.getEntityType());
  }

  public String getProject() {
    return project;
  }

  public String getSession() {
    return session;
  }

  public String getEntityType() {
    return entityType;
  }

  /** Returns an empty builder. */
  public static Builder newBuilder() {
    return new Builder();
  }

  /** Returns a builder pre-populated from this name. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Builds a name from its three path components. */
  public static SessionEntityTypeName of(String project, String session, String entityType) {
    Builder builder = newBuilder();
    builder.setProject(project);
    builder.setSession(session);
    builder.setEntityType(entityType);
    return builder.build();
  }

  /** Renders the three components directly into the formatted resource-name string. */
  public static String format(String project, String session, String entityType) {
    return of(project, session, entityType).toString();
  }

  /**
   * Parses a formatted resource name into its components; the empty string
   * yields {@code null}.
   */
  public static SessionEntityTypeName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        PATH_TEMPLATE.validatedMatch(formattedString, "SessionEntityTypeName.parse: formattedString not in valid format");
    return of(matchMap.get("project"), matchMap.get("session"), matchMap.get("entity_type"));
  }

  /** Parses each element of {@code formattedStrings}, preserving order. */
  public static List<SessionEntityTypeName> parseList(List<String> formattedStrings) {
    List<SessionEntityTypeName> parsed = new ArrayList<>(formattedStrings.size());
    for (String formatted : formattedStrings) {
      parsed.add(parse(formatted));
    }
    return parsed;
  }

  /** Formats each element of {@code values}; null entries become the empty string. */
  public static List<String> toStringList(List<SessionEntityTypeName> values) {
    List<String> formatted = new ArrayList<String>(values.size());
    for (SessionEntityTypeName value : values) {
      formatted.add(value == null ? "" : value.toString());
    }
    return formatted;
  }

  /** Returns whether {@code formattedString} matches this resource-name pattern. */
  public static boolean isParsableFrom(String formattedString) {
    return PATH_TEMPLATE.matches(formattedString);
  }

  public Map<String, String> getFieldValuesMap() {
    Map<String, String> cached = fieldValuesMap;
    if (cached == null) {
      synchronized (this) {
        cached = fieldValuesMap;
        if (cached == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          fieldMapBuilder.put("project", project);
          fieldMapBuilder.put("session", session);
          fieldMapBuilder.put("entityType", entityType);
          cached = fieldMapBuilder.build();
          fieldValuesMap = cached;
        }
      }
    }
    return cached;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  /**
   * @deprecated This method is only present to satisfy the ResourceName interface.
   */
  @Deprecated
  public ResourceNameType getType() {
    throw new UnsupportedOperationException("SessionEntityTypeName.getType() not supported");
  }

  @Override
  public String toString() {
    return PATH_TEMPLATE.instantiate("project", project, "session", session, "entity_type", entityType);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof SessionEntityTypeName)) {
      return false;
    }
    SessionEntityTypeName other = (SessionEntityTypeName) o;
    return project.equals(other.project)
        && session.equals(other.session)
        && entityType.equals(other.entityType);
  }

  @Override
  public int hashCode() {
    // Same accumulation as the generated original: h = h * 1000003 ^ field.hashCode().
    int h = 1;
    h = h * 1000003 ^ project.hashCode();
    h = h * 1000003 ^ session.hashCode();
    h = h * 1000003 ^ entityType.hashCode();
    return h;
  }

  /** Builder for SessionEntityTypeName. */
  public static class Builder {
    private String project;
    private String session;
    private String entityType;

    private Builder() {
    }

    private Builder(SessionEntityTypeName sessionEntityTypeName) {
      this.project = sessionEntityTypeName.project;
      this.session = sessionEntityTypeName.session;
      this.entityType = sessionEntityTypeName.entityType;
    }

    public String getProject() {
      return project;
    }

    public String getSession() {
      return session;
    }

    public String getEntityType() {
      return entityType;
    }

    public Builder setProject(String project) {
      this.project = project;
      return this;
    }

    public Builder setSession(String session) {
      this.session = session;
      return this;
    }

    public Builder setEntityType(String entityType) {
      this.entityType = entityType;
      return this;
    }

    public SessionEntityTypeName build() {
      return new SessionEntityTypeName(this);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.proxy;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Arrays;
import java.util.List;
import net.sf.cglib.core.DefaultNamingPolicy;
import net.sf.cglib.core.Predicate;
import net.sf.cglib.proxy.Enhancer;
import net.sf.cglib.proxy.MethodInterceptor;
import net.sf.cglib.proxy.MethodProxy;
import org.apache.wicket.Application;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.application.IClassResolver;
import org.apache.wicket.core.util.lang.WicketObjects;
import org.apache.wicket.model.IModel;
import org.apache.wicket.util.io.IClusterable;
/**
* A factory class that creates lazy init proxies given a type and a {@link IProxyTargetLocator}
* used to retrieve the object the proxy will represent.
* <p>
* A lazy init proxy waits until the first method invocation before it uses the
* {@link IProxyTargetLocator} to retrieve the object to which the method invocation will be
* forwarded.
* <p>
* This factory creates two kinds of proxies: A standard dynamic proxy when the specified type is an
* interface, and a CGLib proxy when the specified type is a concrete class.
* <p>
* The general use case for such a proxy is to represent a dependency that should not be serialized
* with a wicket page or {@link IModel}. The solution is to serialize the proxy and the
* {@link IProxyTargetLocator} instead of the dependency, and be able to look up the target object
* again when the proxy is deserialized and accessed. A good strategy for achieving this is to have
* a static lookup in the {@link IProxyTargetLocator}, this keeps its size small and makes it safe
* to serialize.
* <p>
* Example:
*
* <pre>
* class UserServiceLocator implements IProxyTargetLocator
* {
* public static final IProxyTargetLocator INSTANCE = new UserServiceLocator();
*
* Object locateProxyObject()
* {
* MyApplication app = (MyApplication)Application.get();
* return app.getUserService();
* }
* }
*
* class UserDetachableModel extends LoadableDetachableModel
* {
* private UserService svc;
*
* private long userId;
*
* public UserDetachableModel(long userId, UserService svc)
* {
* this.userId = userId;
* this.svc = svc;
* }
*
* public Object load()
* {
* return svc.loadUser(userId);
* }
* }
*
* UserService service = LazyInitProxyFactory.createProxy(UserService.class,
* UserServiceLocator.INSTANCE);
*
* UserDetachableModel model = new UserDetachableModel(10, service);
*
* </pre>
*
* The detachable model in the example above follows to good citizen pattern and is easy to unit
* test. These are the advantages gained through the use of the lazy init proxies.
*
* @author Igor Vaynberg (ivaynberg)
*
*/
public class LazyInitProxyFactory
{
    /**
     * Primitive java types and their object wrappers (plus String), which are
     * resolved eagerly instead of being proxied. Typed as {@code List<Class<?>>}
     * so no raw-type suppression is needed.
     */
    private static final List<Class<?>> PRIMITIVES = Arrays.<Class<?>> asList(String.class,
        byte.class, Byte.class, short.class, Short.class, int.class, Integer.class, long.class,
        Long.class, float.class, Float.class, double.class, Double.class, char.class,
        Character.class, boolean.class, Boolean.class);

    /**
     * Create a lazy init proxy for the specified type. The target object will be located using the
     * provided locator upon first method invocation.
     *
     * @param type
     *            type that proxy will represent
     *
     * @param locator
     *            object locator that will locate the object the proxy represents
     *
     * @return lazily initializable proxy
     */
    public static Object createProxy(final Class<?> type, final IProxyTargetLocator locator)
    {
        if (PRIMITIVES.contains(type) || Enum.class.isAssignableFrom(type))
        {
            // We special-case primitives as sometimes people use these as
            // SpringBeans (WICKET-603, WICKET-906). Go figure.
            return locator.locateProxyTarget();
        }
        else if (type.isInterface())
        {
            // Interfaces can be proxied with a plain JDK dynamic proxy.
            JdkHandler handler = new JdkHandler(type, locator);
            try
            {
                ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
                if (Application.exists())
                {
                    // Prefer the application's configured class resolver when available.
                    IClassResolver classResolver = Application.get()
                        .getApplicationSettings()
                        .getClassResolver();
                    if (classResolver != null)
                    {
                        classLoader = classResolver.getClassLoader();
                    }
                }
                return Proxy.newProxyInstance(classLoader,
                    new Class[] { type, Serializable.class, ILazyInitProxy.class,
                            IWriteReplace.class }, handler);
            }
            catch (IllegalArgumentException e)
            {
                /*
                 * STW: In some clustering environments it appears the context classloader fails to
                 * load the proxied interface (currently seen in BEA WLS 9.x clusters). If this
                 * happens, we can try and fall back to the classloader (current) that actually
                 * loaded this class.
                 */
                return Proxy.newProxyInstance(LazyInitProxyFactory.class.getClassLoader(),
                    new Class[] { type, Serializable.class, ILazyInitProxy.class,
                            IWriteReplace.class }, handler);
            }
        }
        else
        {
            // Concrete classes need a CGLib subclass proxy.
            CGLibInterceptor handler = new CGLibInterceptor(type, locator);

            Enhancer e = new Enhancer();
            e.setInterfaces(new Class[] { Serializable.class, ILazyInitProxy.class,
                    IWriteReplace.class });
            e.setSuperclass(type);
            e.setCallback(handler);
            e.setNamingPolicy(new DefaultNamingPolicy()
            {
                @Override
                public String getClassName(final String prefix, final String source,
                    final Object key, final Predicate names)
                {
                    // Prefix generated class names so they are recognizable as Wicket proxies.
                    return super.getClassName("WICKET_" + prefix, source, key, names);
                }
            });

            return e.create();
        }
    }

    /**
     * This interface is used to make the proxy forward writeReplace() call to the handler instead
     * of invoking it on itself. This allows us to serialize the replacement object instead of the
     * proxy itself in case the proxy subclass is deserialized on a VM that does not have it
     * created.
     *
     * @see ProxyReplacement
     *
     * @author Igor Vaynberg (ivaynberg)
     *
     */
    public static interface IWriteReplace
    {
        /**
         * write replace method as defined by Serializable
         *
         * @return object that will replace this object in serialized state
         * @throws ObjectStreamException
         */
        Object writeReplace() throws ObjectStreamException;
    }

    /**
     * Object that replaces the proxy when it is serialized. Upon deserialization this object will
     * create a new proxy with the same locator.
     *
     * @author Igor Vaynberg (ivaynberg)
     *
     */
    static class ProxyReplacement implements IClusterable
    {
        private static final long serialVersionUID = 1L;

        private final IProxyTargetLocator locator;

        private final String type;

        /**
         * Constructor
         *
         * @param type
         * @param locator
         */
        public ProxyReplacement(final String type, final IProxyTargetLocator locator)
        {
            this.type = type;
            this.locator = locator;
        }

        private Object readResolve() throws ObjectStreamException
        {
            Class<?> clazz = WicketObjects.resolveClass(type);
            if (clazz == null)
            {
                ClassNotFoundException cause = new ClassNotFoundException(
                    "Could not resolve type [" + type +
                        "] with the currently configured org.apache.wicket.application.IClassResolver");
                throw new WicketRuntimeException(cause);
            }
            // Recreate a fresh proxy on deserialization; the locator re-finds the target.
            return LazyInitProxyFactory.createProxy(clazz, locator);
        }
    }

    /**
     * Method interceptor for proxies representing concrete object not backed by an interface. These
     * proxies are representing by cglib proxies.
     *
     * @author Igor Vaynberg (ivaynberg)
     *
     */
    private static class CGLibInterceptor
        implements
            MethodInterceptor,
            ILazyInitProxy,
            Serializable,
            IWriteReplace
    {
        private static final long serialVersionUID = 1L;

        private final IProxyTargetLocator locator;

        private final String typeName;

        // Lazily resolved target; transient so serialization goes through writeReplace().
        private transient Object target;

        /**
         * Constructor
         *
         * @param type
         *            class of the object this proxy was created for
         *
         * @param locator
         *            object locator used to locate the object this proxy represents
         */
        public CGLibInterceptor(final Class<?> type, final IProxyTargetLocator locator)
        {
            super();
            typeName = type.getName();
            this.locator = locator;
        }

        /**
         * @see net.sf.cglib.proxy.MethodInterceptor#intercept(java.lang.Object,
         *      java.lang.reflect.Method, java.lang.Object[], net.sf.cglib.proxy.MethodProxy)
         */
        @Override
        public Object intercept(final Object object, final Method method, final Object[] args,
            final MethodProxy proxy) throws Throwable
        {
            // Object/serialization plumbing methods are handled on the proxy itself
            // without materializing the target.
            if (isFinalizeMethod(method))
            {
                // swallow finalize call
                return null;
            }
            else if (isEqualsMethod(method))
            {
                return (equals(args[0])) ? Boolean.TRUE : Boolean.FALSE;
            }
            else if (isHashCodeMethod(method))
            {
                return hashCode();
            }
            else if (isToStringMethod(method))
            {
                return toString();
            }
            else if (isWriteReplaceMethod(method))
            {
                return writeReplace();
            }
            else if (method.getDeclaringClass().equals(ILazyInitProxy.class))
            {
                return getObjectLocator();
            }

            // First real invocation: locate the target, then delegate.
            if (target == null)
            {
                target = locator.locateProxyTarget();
            }
            return proxy.invoke(target, args);
        }

        /**
         * @see org.apache.wicket.proxy.ILazyInitProxy#getObjectLocator()
         */
        @Override
        public IProxyTargetLocator getObjectLocator()
        {
            return locator;
        }

        /**
         * @see org.apache.wicket.proxy.LazyInitProxyFactory.IWriteReplace#writeReplace()
         */
        @Override
        public Object writeReplace() throws ObjectStreamException
        {
            return new ProxyReplacement(typeName, locator);
        }
    }

    /**
     * Invocation handler for proxies representing interface based object. For interface backed
     * objects dynamic jdk proxies are used.
     *
     * @author Igor Vaynberg (ivaynberg)
     *
     */
    private static class JdkHandler
        implements
            InvocationHandler,
            ILazyInitProxy,
            Serializable,
            IWriteReplace
    {
        private static final long serialVersionUID = 1L;

        private final IProxyTargetLocator locator;

        private final String typeName;

        // Lazily resolved target; transient so serialization goes through writeReplace().
        private transient Object target;

        /**
         * Constructor
         *
         * @param type
         *            class of object this handler will represent
         *
         * @param locator
         *            object locator used to locate the object this proxy represents
         */
        public JdkHandler(final Class<?> type, final IProxyTargetLocator locator)
        {
            super();
            this.locator = locator;
            typeName = type.getName();
        }

        /**
         * @see java.lang.reflect.InvocationHandler#invoke(java.lang.Object,
         *      java.lang.reflect.Method, java.lang.Object[])
         */
        @Override
        public Object invoke(final Object proxy, final Method method, final Object[] args)
            throws Throwable
        {
            // Same special-method dispatch order as CGLibInterceptor.intercept()
            // for consistency between the two proxy flavors.
            if (isFinalizeMethod(method))
            {
                // swallow finalize call
                return null;
            }
            else if (isEqualsMethod(method))
            {
                return (equals(args[0])) ? Boolean.TRUE : Boolean.FALSE;
            }
            else if (isHashCodeMethod(method))
            {
                return hashCode();
            }
            else if (isToStringMethod(method))
            {
                return toString();
            }
            else if (isWriteReplaceMethod(method))
            {
                return writeReplace();
            }
            else if (method.getDeclaringClass().equals(ILazyInitProxy.class))
            {
                return getObjectLocator();
            }

            if (target == null)
            {
                target = locator.locateProxyTarget();
            }

            try
            {
                method.setAccessible(true);
                return method.invoke(target, args);
            }
            catch (InvocationTargetException e)
            {
                // Unwrap so callers see the target's original exception.
                throw e.getTargetException();
            }
        }

        /**
         * @see org.apache.wicket.proxy.ILazyInitProxy#getObjectLocator()
         */
        @Override
        public IProxyTargetLocator getObjectLocator()
        {
            return locator;
        }

        /**
         * @see org.apache.wicket.proxy.LazyInitProxyFactory.IWriteReplace#writeReplace()
         */
        @Override
        public Object writeReplace() throws ObjectStreamException
        {
            return new ProxyReplacement(typeName, locator);
        }
    }

    /**
     * Checks if the method is derived from Object.equals()
     *
     * @param method
     *            method being tested
     * @return true if the method is derived from Object.equals(), false otherwise
     */
    public static boolean isEqualsMethod(final Method method)
    {
        return (method.getReturnType() == boolean.class) &&
            (method.getParameterTypes().length == 1) &&
            (method.getParameterTypes()[0] == Object.class) && method.getName().equals("equals");
    }

    /**
     * Checks if the method is derived from Object.hashCode()
     *
     * @param method
     *            method being tested
     * @return true if the method is defined from Object.hashCode(), false otherwise
     */
    public static boolean isHashCodeMethod(final Method method)
    {
        return (method.getReturnType() == int.class) && (method.getParameterTypes().length == 0) &&
            method.getName().equals("hashCode");
    }

    /**
     * Checks if the method is derived from Object.toString()
     *
     * @param method
     *            method being tested
     * @return true if the method is defined from Object.toString(), false otherwise
     */
    public static boolean isToStringMethod(final Method method)
    {
        return (method.getReturnType() == String.class) &&
            (method.getParameterTypes().length == 0) && method.getName().equals("toString");
    }

    /**
     * Checks if the method is derived from Object.finalize()
     *
     * @param method
     *            method being tested
     * @return true if the method is defined from Object.finalize(), false otherwise
     */
    public static boolean isFinalizeMethod(final Method method)
    {
        return (method.getReturnType() == void.class) && (method.getParameterTypes().length == 0) &&
            method.getName().equals("finalize");
    }

    /**
     * Checks if the method is the writeReplace method
     *
     * @param method
     *            method being tested
     * @return true if the method is the writeReplace method, false otherwise
     */
    public static boolean isWriteReplaceMethod(final Method method)
    {
        return (method.getReturnType() == Object.class) &&
            (method.getParameterTypes().length == 0) && method.getName().equals("writeReplace");
    }
}
| |
/*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.beans;
import java.util.*;
import java.lang.reflect.*;
import java.util.Objects;
import sun.reflect.misc.*;
/**
* The <code>DefaultPersistenceDelegate</code> is a concrete implementation of
* the abstract <code>PersistenceDelegate</code> class and
* is the delegate used by default for classes about
* which no information is available. The <code>DefaultPersistenceDelegate</code>
* provides, version resilient, public API-based persistence for
* classes that follow the JavaBeans™ conventions without any class specific
* configuration.
* <p>
* The key assumptions are that the class has a nullary constructor
* and that its state is accurately represented by matching pairs
* of "setter" and "getter" methods in the order they are returned
* by the Introspector.
* In addition to providing code-free persistence for JavaBeans,
* the <code>DefaultPersistenceDelegate</code> provides a convenient means
* to effect persistent storage for classes that have a constructor
* that, while not nullary, simply requires some property values
* as arguments.
*
* @see #DefaultPersistenceDelegate(String[])
* @see java.beans.Introspector
*
* @since 1.4
*
* @author Philip Milne
*/
public class DefaultPersistenceDelegate extends PersistenceDelegate {
    private static final String[] EMPTY = {};
    // Property names whose getters supply the constructor arguments; EMPTY
    // means the nullary constructor is used.
    private final String[] constructor;
    // Lazily computed cache for definesEquals(Object); null until first use.
    private Boolean definesEquals;

    /**
     * Creates a persistence delegate for a class with a nullary constructor.
     *
     * @see #DefaultPersistenceDelegate(java.lang.String[])
     */
    public DefaultPersistenceDelegate() {
        this.constructor = EMPTY;
    }

    /**
     * Creates a default persistence delegate for a class with a
     * constructor whose arguments are the values of the property
     * names as specified by <code>constructorPropertyNames</code>.
     * The constructor arguments are created by
     * evaluating the property names in the order they are supplied.
     * To use this class to specify a single preferred constructor for use
     * in the serialization of a particular type, we state the
     * names of the properties that make up the constructor's
     * arguments. For example, the <code>Font</code> class which
     * does not define a nullary constructor can be handled
     * with the following persistence delegate:
     *
     * <pre>
     *     new DefaultPersistenceDelegate(new String[]{"name", "style", "size"});
     * </pre>
     *
     * @param  constructorPropertyNames The property names for the arguments of this constructor.
     *
     * @see #instantiate
     */
    public DefaultPersistenceDelegate(String[] constructorPropertyNames) {
        // Defensive copy so later mutation of the caller's array has no effect.
        this.constructor = (constructorPropertyNames == null) ? EMPTY : constructorPropertyNames.clone();
    }

    // True if the type itself (not a superclass) declares equals(Object).
    private static boolean definesEquals(Class<?> type) {
        try {
            return type == type.getMethod("equals", Object.class).getDeclaringClass();
        }
        catch(NoSuchMethodException e) {
            return false;
        }
    }

    // Cached variant of definesEquals(Class) keyed on the instance's class.
    private boolean definesEquals(Object instance) {
        if (definesEquals != null) {
            return (definesEquals == Boolean.TRUE);
        }
        else {
            boolean result = definesEquals(instance.getClass());
            definesEquals = result ? Boolean.TRUE : Boolean.FALSE;
            return result;
        }
    }

    /**
     * If the number of arguments in the specified constructor is non-zero and
     * the class of <code>oldInstance</code> explicitly declares an "equals" method
     * this method returns the value of <code>oldInstance.equals(newInstance)</code>.
     * Otherwise, this method uses the superclass's definition which returns true if the
     * classes of the two instances are equal.
     *
     * @param oldInstance The instance to be copied.
     * @param newInstance The instance that is to be modified.
     * @return True if an equivalent copy of <code>newInstance</code> may be
     *         created by applying a series of mutations to <code>oldInstance</code>.
     *
     * @see #DefaultPersistenceDelegate(String[])
     */
    protected boolean mutatesTo(Object oldInstance, Object newInstance) {
        // Assume the instance is either mutable or a singleton
        // if it has a nullary constructor.
        return (constructor.length == 0) || !definesEquals(oldInstance) ?
            super.mutatesTo(oldInstance, newInstance) :
            oldInstance.equals(newInstance);
    }

    /**
     * This default implementation of the <code>instantiate</code> method returns
     * an expression containing the predefined method name "new" which denotes a
     * call to a constructor with the arguments as specified in
     * the <code>DefaultPersistenceDelegate</code>'s constructor.
     *
     * @param  oldInstance The instance to be instantiated.
     * @param  out The code output stream.
     * @return An expression whose value is <code>oldInstance</code>.
     *
     * @throws NullPointerException if {@code out} is {@code null}
     *                              and this value is used in the method
     *
     * @see #DefaultPersistenceDelegate(String[])
     */
    protected Expression instantiate(Object oldInstance, Encoder out) {
        int nArgs = constructor.length;
        Class<?> type = oldInstance.getClass();
        Object[] constructorArgs = new Object[nArgs];
        for(int i = 0; i < nArgs; i++) {
            try {
                Method method = findMethod(type, this.constructor[i]);
                constructorArgs[i] = MethodUtil.invoke(method, oldInstance, new Object[0]);
            }
            catch (Exception e) {
                out.getExceptionListener().exceptionThrown(e);
            }
        }
        return new Expression(oldInstance, oldInstance.getClass(), "new", constructorArgs);
    }

    // Resolves the read method (getter) for the named property, failing loudly
    // when the property or its getter cannot be found.
    private Method findMethod(Class<?> type, String property) {
        if (property == null) {
            throw new IllegalArgumentException("Property name is null");
        }
        PropertyDescriptor pd = getPropertyDescriptor(type, property);
        if (pd == null) {
            throw new IllegalStateException("Could not find property by the name " + property);
        }
        Method method = pd.getReadMethod();
        if (method == null) {
            throw new IllegalStateException("Could not find getter for the property " + property);
        }
        return method;
    }

    // Emits the statements needed to make newInstance's property equal to
    // oldInstance's; no-op when the values already match in the output stream.
    private void doProperty(Class<?> type, PropertyDescriptor pd, Object oldInstance, Object newInstance, Encoder out) throws Exception {
        Method getter = pd.getReadMethod();
        Method setter = pd.getWriteMethod();

        if (getter != null && setter != null) {
            Expression oldGetExp = new Expression(oldInstance, getter.getName(), new Object[]{});
            Expression newGetExp = new Expression(newInstance, getter.getName(), new Object[]{});
            Object oldValue = oldGetExp.getValue();
            Object newValue = newGetExp.getValue();
            out.writeExpression(oldGetExp);
            if (!Objects.equals(newValue, out.get(oldValue))) {
                // Search for a static constant with this value;
                // NOTE: do not cast here — the attribute may hold a non-array
                // value; the instanceof check below is the guard. The previous
                // unconditional (Object[]) cast could throw ClassCastException.
                Object e = pd.getValue("enumerationValues");
                if (e instanceof Object[] && Array.getLength(e) % 3 == 0) {
                    Object[] a = (Object[])e;
                    for(int i = 0; i < a.length; i = i + 3) {
                        try {
                            Field f = type.getField((String)a[i]);
                            if (f.get(null).equals(oldValue)) {
                                out.remove(oldValue);
                                out.writeExpression(new Expression(oldValue, f, "get", new Object[]{null}));
                            }
                        }
                        catch (Exception ex) {
                            // Best effort: a missing/inaccessible field simply
                            // means this constant is not used for this value.
                        }
                    }
                }
                invokeStatement(oldInstance, setter.getName(), new Object[]{oldValue}, out);
            }
        }
    }

    static void invokeStatement(Object instance, String methodName, Object[] args, Encoder out) {
        out.writeStatement(new Statement(instance, methodName, args));
    }

    // Write out the properties of this instance.
    private void initBean(Class<?> type, Object oldInstance, Object newInstance, Encoder out) {
        // Public non-final, non-static, non-transient fields first.
        for (Field field : type.getFields()) {
            if (!ReflectUtil.isPackageAccessible(field.getDeclaringClass())) {
                continue;
            }
            int mod = field.getModifiers();
            if (Modifier.isFinal(mod) || Modifier.isStatic(mod) || Modifier.isTransient(mod)) {
                continue;
            }
            try {
                Expression oldGetExp = new Expression(field, "get", new Object[] { oldInstance });
                Expression newGetExp = new Expression(field, "get", new Object[] { newInstance });
                Object oldValue = oldGetExp.getValue();
                Object newValue = newGetExp.getValue();
                out.writeExpression(oldGetExp);
                if (!Objects.equals(newValue, out.get(oldValue))) {
                    out.writeStatement(new Statement(field, "set", new Object[] { oldInstance, oldValue }));
                }
            }
            catch (Exception exception) {
                out.getExceptionListener().exceptionThrown(exception);
            }
        }
        BeanInfo info;
        try {
            info = Introspector.getBeanInfo(type);
        } catch (IntrospectionException exception) {
            // Without BeanInfo there are no properties or listeners to persist.
            return;
        }
        // Properties
        for (PropertyDescriptor d : info.getPropertyDescriptors()) {
            if (d.isTransient()) {
                continue;
            }
            try {
                doProperty(type, d, oldInstance, newInstance, out);
            }
            catch (Exception e) {
                out.getExceptionListener().exceptionThrown(e);
            }
        }

        // Listeners
        /*
        Pending(milne). There is a general problem with the archival of
        listeners which is unresolved as of 1.4. Many of the methods
        which install one object inside another (typically "add" methods
        or setters) automatically install a listener on the "child" object
        so that its "parent" may respond to changes that are made to it.
        For example the JTable:setModel() method automatically adds a
        TableModelListener (the JTable itself in this case) to the supplied
        table model.

        We do not need to explicitly add these listeners to the model in an
        archive as they will be added automatically by, in the above case,
        the JTable's "setModel" method. In some cases, we must specifically
        avoid trying to do this since the listener may be an inner class
        that cannot be instantiated using public API.

        No general mechanism currently
        exists for differentiating between these kind of listeners and
        those which were added explicitly by the user. A mechanism must
        be created to provide a general means to differentiate these
        special cases so as to provide reliable persistence of listeners
        for the general case.
        */
        if (!java.awt.Component.class.isAssignableFrom(type)) {
            return; // Just handle the listeners of Components for now.
        }
        for (EventSetDescriptor d : info.getEventSetDescriptors()) {
            if (d.isTransient()) {
                continue;
            }
            Class<?> listenerType = d.getListenerType();


            // The ComponentListener is added automatically, when
            // Container:add is called on the parent.
            if (listenerType == java.awt.event.ComponentListener.class) {
                continue;
            }

            // JMenuItems have a change listener added to them in
            // their "add" methods to enable accessibility support -
            // see the add method in JMenuItem for details. We cannot
            // instantiate this instance as it is a private inner class
            // and do not need to do this anyway since it will be created
            // and installed by the "add" method. Special case this for now,
            // ignoring all change listeners on JMenuItems.
            if (listenerType == javax.swing.event.ChangeListener.class &&
                type == javax.swing.JMenuItem.class) {
                continue;
            }

            EventListener[] oldL = new EventListener[0];
            EventListener[] newL = new EventListener[0];
            try {
                Method m = d.getGetListenerMethod();
                oldL = (EventListener[])MethodUtil.invoke(m, oldInstance, new Object[]{});
                newL = (EventListener[])MethodUtil.invoke(m, newInstance, new Object[]{});
            }
            catch (Exception e2) {
                try {
                    Method m = type.getMethod("getListeners", new Class<?>[]{Class.class});
                    oldL = (EventListener[])MethodUtil.invoke(m, oldInstance, new Object[]{listenerType});
                    newL = (EventListener[])MethodUtil.invoke(m, newInstance, new Object[]{listenerType});
                }
                catch (Exception e3) {
                    return;
                }
            }

            // Assume the listeners are in the same order and that there are no gaps.
            // Eventually, this may need to do true differencing.
            String addListenerMethodName = d.getAddListenerMethod().getName();
            for (int i = newL.length; i < oldL.length; i++) {
                // System.out.println("Adding listener: " + addListenerMethodName + oldL[i]);
                invokeStatement(oldInstance, addListenerMethodName, new Object[]{oldL[i]}, out);
            }

            String removeListenerMethodName = d.getRemoveListenerMethod().getName();
            for (int i = oldL.length; i < newL.length; i++) {
                invokeStatement(oldInstance, removeListenerMethodName, new Object[]{newL[i]}, out);
            }
        }
    }

    /**
     * This default implementation of the <code>initialize</code> method assumes
     * all state held in objects of this type is exposed via the
     * matching pairs of "setter" and "getter" methods in the order
     * they are returned by the Introspector. If a property descriptor
     * defines a "transient" attribute with a value equal to
     * <code>Boolean.TRUE</code> the property is ignored by this
     * default implementation. Note that this use of the word
     * "transient" is quite independent of the field modifier
     * that is used by the <code>ObjectOutputStream</code>.
     * <p>
     * For each non-transient property, an expression is created
     * in which the nullary "getter" method is applied
     * to the <code>oldInstance</code>. The value of this
     * expression is the value of the property in the instance that is
     * being serialized. If the value of this expression
     * in the cloned environment <code>mutatesTo</code> the
     * target value, the new value is initialized to make it
     * equivalent to the old value. In this case, because
     * the property value has not changed there is no need to
     * call the corresponding "setter" method and no statement
     * is emitted. If not however, the expression for this value
     * is replaced with another expression (normally a constructor)
     * and the corresponding "setter" method is called to install
     * the new property value in the object. This scheme removes
     * default information from the output produced by streams
     * using this delegate.
     * <p>
     * In passing these statements to the output stream, where they
     * will be executed, side effects are made to the <code>newInstance</code>.
     * In most cases this allows the problem of properties
     * whose values depend on each other to actually help the
     * serialization process by making the number of statements
     * that need to be written to the output smaller. In general,
     * the problem of handling interdependent properties is reduced to
     * that of finding an order for the properties in
     * a class such that no property value depends on the value of
     * a subsequent property.
     *
     * @param type the type of the instances
     * @param oldInstance The instance to be copied.
     * @param newInstance The instance that is to be modified.
     * @param out The stream to which any initialization statements should be written.
     *
     * @throws NullPointerException if {@code out} is {@code null}
     *
     * @see java.beans.Introspector#getBeanInfo
     * @see java.beans.PropertyDescriptor
     */
    protected void initialize(Class<?> type,
                              Object oldInstance, Object newInstance,
                              Encoder out)
    {
        // System.out.println("DefulatPD:initialize" + type);
        super.initialize(type, oldInstance, newInstance, out);
        if (oldInstance.getClass() == type) { // !type.isInterface()) {
            initBean(type, oldInstance, newInstance, out);
        }
    }

    // Linear scan over the type's property descriptors; null when absent or
    // when introspection itself fails (treated the same as "not found").
    private static PropertyDescriptor getPropertyDescriptor(Class<?> type, String property) {
        try {
            for (PropertyDescriptor pd : Introspector.getBeanInfo(type).getPropertyDescriptors()) {
                if (property.equals(pd.getName()))
                    return pd;
            }
        } catch (IntrospectionException exception) {
            // Fall through: no descriptor available.
        }
        return null;
    }
}
| |
package com.adstoreapp.stormy.ui;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.Drawable;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.adstoreapp.stormy.R;
import com.adstoreapp.stormy.weather.Current;
import com.adstoreapp.stormy.weather.Day;
import com.adstoreapp.stormy.weather.Forecast;
import com.adstoreapp.stormy.weather.Hour;
import com.squareup.okhttp.Call;
import com.squareup.okhttp.Callback;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import butterknife.OnClick;
public class MainActivity extends Activity {
public static final String TAG = MainActivity.class.getSimpleName();
public static final String DAILY_FORECAST = "DAILY FORECAST";
public static final String HOURLY_FORECAST = "HOURLY FORECAST";
private Forecast mForecast;
private TextView mTemperatureLabel;
private TextView mTimeLabel;
private TextView mSummaryLabel;
private TextView mHumidityValue;
private TextView mPrecipValue;
private ImageView mIconView;
private ImageView mRefreshImageView;
private Button mDailyButton;
private Button mHourlyButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mTemperatureLabel = (TextView) findViewById(R.id.temperatureLabel);
mTimeLabel = (TextView) findViewById(R.id.timeLabel);
mSummaryLabel = (TextView) findViewById(R.id.summaryText);
mHumidityValue = (TextView) findViewById(R.id.humidityValue);
mPrecipValue = (TextView) findViewById(R.id.precipValue);
mIconView = (ImageView) findViewById(R.id.iconImageView);
mRefreshImageView = (ImageView) findViewById(R.id.refreshImageView);
mDailyButton = (Button) findViewById(R.id.DailyButton);
mHourlyButton = (Button) findViewById(R.id.Hourly);
mRefreshImageView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
refreshWeather();
}
});
refreshWeather();
Log.d(TAG, "Running MainActivity UI code");
mDailyButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(MainActivity.this, DailyActivity.class);
intent.putExtra(DAILY_FORECAST, mForecast.getDailyForecast());
startActivity(intent);
}
});
mHourlyButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(MainActivity.this, HourlyForecastActivity.class);
intent.putExtra(HOURLY_FORECAST, mForecast.getHourlyForecast());
startActivity(intent);
}
});
}
private void refreshWeather() {
String apiKey = "e22e9dff1871f50e4f3f26f740f13f04";
double latitude = 37.8267;
double longitude = -122.423;
String forecastUrl = "https://api.forecast.io/forecast/" + apiKey + "/" + latitude + "," + longitude ;
if(isNetwrokAvailable()) {
OkHttpClient client = new OkHttpClient();
Request request = new Request.Builder().url(forecastUrl).build();
Call call = client.newCall(request);
call.enqueue(new Callback() {
@Override
public void onFailure(Request request, IOException e) {
}
@Override
public void onResponse(Response response) throws IOException {
try {
String jsonData = response.body().string();
Log.v(TAG, jsonData);
if (response.isSuccessful()) {
mForecast = parseForecastDetails(jsonData);
runOnUiThread(new Runnable() {
@Override
public void run() {
getUpdatedWeather();
}
});
} else {
alertUserAboutError();
}
} catch (IOException e) {
Log.e(TAG, "Exception Caught", e);
}
catch (JSONException e) {
Log.e(TAG, "Exception Caught", e);
}
}
});
} else {
Toast.makeText(this, "Network is not available", Toast.LENGTH_LONG).show();
}
}
private void getUpdatedWeather() {
Current current = mForecast.getCurrent();
mTemperatureLabel.setText(current.getTemperature() + "");
mTimeLabel.setText("At " + current.getFormattedTime() + "the temperature will be");
mSummaryLabel.setText(current.getSummary() + "");
mHumidityValue.setText(current.getHumidity() + "");
mPrecipValue.setText(current.getPercipChance() + "%");
Drawable drawable = getResources().getDrawable(current.getIconId());
mIconView.setImageDrawable(drawable);
}
private Forecast parseForecastDetails(String jsonData) throws JSONException {
Forecast forecast = new Forecast();
forecast.setCurrent(getCurrentDetails(jsonData));
forecast.setDailyForecast(getDailyForecast(jsonData));
forecast.setHourlyForecast(getHourlyForecast(jsonData));
return forecast;
}
private Hour[] getHourlyForecast(String jsonData) throws JSONException{
JSONObject forecast = new JSONObject(jsonData);
String timezone = forecast.getString("timezone");
JSONObject hourly = forecast.getJSONObject("hourly");
JSONArray data = hourly.getJSONArray("data");
Hour[] hours = new Hour[data.length()];
for(int i = 0; i<data.length(); i++) {
JSONObject jsonHour = data.getJSONObject(i);
Hour hour = new Hour();
hour.setSummary(jsonHour.getString("summary"));
hour.setTemperature(jsonHour.getDouble("temperature"));
hour.setTime(jsonHour.getLong("time"));
hour.setIcon(jsonHour.getString("icon"));
hour.setTimezone(timezone);
hours[i] = hour;
}
return hours;
}
/**
 * Parses the "daily.data" array of the payload into {@link Day} objects,
 * stamping each with the top-level "timezone" value.
 *
 * @throws JSONException if any expected key is missing or of the wrong type
 */
private Day[] getDailyForecast(String jsonData) throws JSONException {
    JSONObject root = new JSONObject(jsonData);
    String timezone = root.getString("timezone");
    JSONArray dailyData = root.getJSONObject("daily").getJSONArray("data");
    Day[] result = new Day[dailyData.length()];
    for (int index = 0; index < result.length; index++) {
        JSONObject entry = dailyData.getJSONObject(index);
        Day parsed = new Day();
        parsed.setTemperatureMax(entry.getDouble("temperatureMax"));
        parsed.setSummary(entry.getString("summary"));
        parsed.setIcon(entry.getString("icon"));
        parsed.setTime(entry.getLong("time"));
        parsed.setTimezone(timezone);
        result[index] = parsed;
    }
    return result;
}
/**
 * Parses the "currently" object of the payload into a {@link Current},
 * logging the timezone read from the top level and the formatted time
 * for debugging.
 *
 * @throws JSONException if any expected key is missing or of the wrong type
 */
private Current getCurrentDetails(String jsonData) throws JSONException {
    JSONObject root = new JSONObject(jsonData);
    String timezone = root.getString("timezone");
    Log.i(TAG, "FROM JSON:" + timezone);
    JSONObject currently = root.getJSONObject("currently");
    Current result = new Current();
    result.setTimeZone(timezone);
    result.setTemperature(currently.getDouble("temperature"));
    result.setSummary(currently.getString("summary"));
    result.setPercipChance(currently.getDouble("precipProbability"));
    result.setIcon(currently.getString("icon"));
    result.setTime(currently.getLong("time"));
    result.setHumidity(currently.getDouble("humidity"));
    Log.d(TAG, result.getFormattedTime());
    return result;
}
/**
 * Reports whether an active, connected network is currently available.
 * <p>
 * NOTE(review): the method name is misspelled ("Netwrok") but is kept
 * unchanged so callers elsewhere in the class keep compiling.
 */
private boolean isNetwrokAvailable() {
    ConnectivityManager manager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
    NetworkInfo networkInfo = manager.getActiveNetworkInfo();
    // Idiom fix: the mutable flag was redundant — return the condition directly.
    return networkInfo != null && networkInfo.isConnected();
}
/** Shows the generic error dialog fragment. */
private void alertUserAboutError() {
    new AlertDialogFragment().show(getFragmentManager(), "get error Message");
}
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.pricer;
import java.io.Serializable;
import java.time.LocalDate;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableConstructor;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.date.DayCount;
import com.opengamma.strata.collect.ArgChecker;
import com.opengamma.strata.collect.array.DoubleArray;
import com.opengamma.strata.data.MarketDataName;
import com.opengamma.strata.market.ValueType;
import com.opengamma.strata.market.curve.Curve;
import com.opengamma.strata.market.curve.CurveInfoType;
import com.opengamma.strata.market.curve.InterpolatedNodalCurve;
import com.opengamma.strata.market.param.CurrencyParameterSensitivities;
import com.opengamma.strata.market.param.CurrencyParameterSensitivity;
import com.opengamma.strata.market.param.ParameterMetadata;
import com.opengamma.strata.market.param.ParameterPerturbation;
import com.opengamma.strata.market.param.UnitParameterSensitivity;
/**
* Provides access to discount factors for a currency based on a zero rate periodically-compounded curve.
* <p>
* This provides discount factors for a single currency.
* <p>
* This implementation is based on an underlying curve that is stored with maturities
* and zero-coupon periodically-compounded rates.
*/
@BeanDefinition(builderScope = "private")
public final class ZeroRatePeriodicDiscountFactors
    implements DiscountFactors, ImmutableBean, Serializable {

  /**
   * Year fraction used as an effective zero.
   */
  private static final double EFFECTIVE_ZERO = 1e-10;

  /**
   * The currency that the discount factors are for.
   */
  @PropertyDefinition(validate = "notNull", overrideGet = true)
  private final Currency currency;
  /**
   * The valuation date.
   */
  @PropertyDefinition(validate = "notNull", overrideGet = true)
  private final LocalDate valuationDate;
  /**
   * The underlying curve.
   * The metadata of the curve must define a day count.
   */
  @PropertyDefinition(validate = "notNull")
  private final Curve curve;
  /**
   * The number of compounding periods per year of the zero-coupon rate.
   */
  private final transient int frequency;  // cached, not a property
  /**
   * The day count convention of the curve.
   */
  private final transient DayCount dayCount;  // cached, not a property

  //-------------------------------------------------------------------------
  /**
   * Obtains an instance based on a zero-rates curve.
   * <p>
   * The curve is specified by an instance of {@link Curve}, such as {@link InterpolatedNodalCurve}.
   * The curve must contain {@linkplain ValueType#YEAR_FRACTION year fractions}
   * against {@linkplain ValueType#ZERO_RATE zero rates}.
   * The day count and compounding periods per year must be present in the metadata.
   *
   * @param currency  the currency
   * @param valuationDate  the valuation date for which the curve is valid
   * @param underlyingCurve  the underlying curve
   * @return the curve
   */
  public static ZeroRatePeriodicDiscountFactors of(Currency currency, LocalDate valuationDate, Curve underlyingCurve) {
    return new ZeroRatePeriodicDiscountFactors(currency, valuationDate, underlyingCurve);
  }

  // validates the curve metadata and caches the compounding frequency and day count
  @ImmutableConstructor
  private ZeroRatePeriodicDiscountFactors(
      Currency currency,
      LocalDate valuationDate,
      Curve curve) {

    ArgChecker.notNull(currency, "currency");
    ArgChecker.notNull(valuationDate, "valuationDate");
    ArgChecker.notNull(curve, "curve");
    Optional<Integer> frequencyOpt = curve.getMetadata().findInfo(CurveInfoType.COMPOUNDING_PER_YEAR);
    // Bug fix: error message previously read "periodicaly compounded curve " (typo + trailing space).
    ArgChecker.isTrue(frequencyOpt.isPresent(), "Compounding per year must be present for periodically compounded curve");
    ArgChecker.isTrue(frequencyOpt.get() > 0, "Compounding per year must be positive");
    curve.getMetadata().getXValueType().checkEquals(
        ValueType.YEAR_FRACTION, "Incorrect x-value type for zero-rate discount curve");
    curve.getMetadata().getYValueType().checkEquals(
        ValueType.ZERO_RATE, "Incorrect y-value type for zero-rate discount curve");
    DayCount dayCount = curve.getMetadata().findInfo(CurveInfoType.DAY_COUNT)
        .orElseThrow(() -> new IllegalArgumentException("Incorrect curve metadata, missing DayCount"));

    this.currency = currency;
    this.valuationDate = valuationDate;
    this.curve = curve;
    this.dayCount = dayCount;
    this.frequency = frequencyOpt.get();
  }

  // ensure standard constructor is invoked (re-derives the transient cached fields on deserialization)
  private Object readResolve() {
    return new ZeroRatePeriodicDiscountFactors(currency, valuationDate, curve);
  }

  //-------------------------------------------------------------------------
  @Override
  public <T> Optional<T> findData(MarketDataName<T> name) {
    if (curve.getName().equals(name)) {
      return Optional.of(name.getMarketDataType().cast(curve));
    }
    return Optional.empty();
  }

  @Override
  public int getParameterCount() {
    return curve.getParameterCount();
  }

  @Override
  public double getParameter(int parameterIndex) {
    return curve.getParameter(parameterIndex);
  }

  @Override
  public ParameterMetadata getParameterMetadata(int parameterIndex) {
    return curve.getParameterMetadata(parameterIndex);
  }

  @Override
  public ZeroRatePeriodicDiscountFactors withParameter(int parameterIndex, double newValue) {
    return withCurve(curve.withParameter(parameterIndex, newValue));
  }

  @Override
  public ZeroRatePeriodicDiscountFactors withPerturbation(ParameterPerturbation perturbation) {
    return withCurve(curve.withPerturbation(perturbation));
  }

  //-------------------------------------------------------------------------
  @Override
  public double relativeYearFraction(LocalDate date) {
    return dayCount.relativeYearFraction(valuationDate, date);
  }

  @Override
  public double discountFactor(double relativeYearFraction) {
    // convert zero rate periodically compounded to discount factor:
    // df = (1 + r/f)^(-t*f) where r is the curve zero rate and f the frequency
    return Math.pow(1d + curve.yValue(relativeYearFraction) / frequency, -relativeYearFraction * frequency);
  }

  @Override
  public double discountFactorTimeDerivative(double yearFraction) {
    // d(df)/dt for df = (1 + r(t)/f)^(-t*f), including the dependence of r on t
    // via the curve's first derivative (chain rule on both factors)
    double zr = curve.yValue(yearFraction);
    double periodIF = 1d + zr / frequency;
    double df = Math.pow(periodIF, -yearFraction * frequency);
    return -frequency * df *
        (Math.log(periodIF) + yearFraction / periodIF * curve.firstDerivative(yearFraction) / frequency);
  }

  @Override
  public double zeroRate(double yearFraction) {
    // convert the periodically-compounded rate to its continuously-compounded equivalent
    double ratePeriod = curve.yValue(yearFraction);
    return frequency * Math.log(1d + ratePeriod / frequency);
  }

  //-------------------------------------------------------------------------
  @Override
  public ZeroRateSensitivity zeroRatePointSensitivity(double yearFraction, Currency sensitivityCurrency) {
    // sensitivity of df to the continuously-compounded zero rate: d(df)/dr = -t * df
    double discountFactor = discountFactor(yearFraction);
    return ZeroRateSensitivity.of(currency, yearFraction, sensitivityCurrency, -discountFactor * yearFraction);
  }

  @Override
  public ZeroRateSensitivity zeroRatePointSensitivityWithSpread(
      double yearFraction,
      Currency sensitivityCurrency,
      double zSpread,
      CompoundedRateType compoundedRateType,
      int periodPerYear) {

    // degenerate maturity: sensitivity is zero by construction
    if (Math.abs(yearFraction) < EFFECTIVE_ZERO) {
      return ZeroRateSensitivity.of(currency, yearFraction, sensitivityCurrency, 0);
    }
    if (compoundedRateType.equals(CompoundedRateType.CONTINUOUS)) {
      double discountFactor = discountFactorWithSpread(yearFraction, zSpread, compoundedRateType, periodPerYear);
      return ZeroRateSensitivity.of(currency, yearFraction, sensitivityCurrency, -discountFactor * yearFraction);
    }
    // periodic spread: recover the periodic rate from the base discount factor,
    // shift it by the spread and differentiate the spread-adjusted discount factor
    double df = discountFactor(yearFraction);
    double df2 = Math.pow(df, -1.0 / (yearFraction * periodPerYear));
    double df3 = df2 + zSpread / periodPerYear;
    double ddfSdz = -yearFraction * Math.pow(df3, -yearFraction * periodPerYear - 1) * df2;
    return ZeroRateSensitivity.of(currency, yearFraction, sensitivityCurrency, ddfSdz);
  }

  //-------------------------------------------------------------------------
  @Override
  public CurrencyParameterSensitivities parameterSensitivity(ZeroRateSensitivity pointSens) {
    // rescale the continuous-rate point sensitivity to the periodic-rate curve parameters
    double yearFraction = pointSens.getYearFraction();
    double rp = curve.yValue(yearFraction);
    double rcBar = 1.0;
    double rpBar = 1.0 / (1 + rp / frequency) * rcBar;
    UnitParameterSensitivity unitSens = curve.yValueParameterSensitivity(yearFraction).multipliedBy(rpBar);
    CurrencyParameterSensitivity curSens = unitSens.multipliedBy(pointSens.getCurrency(), pointSens.getSensitivity());
    return CurrencyParameterSensitivities.of(curSens);
  }

  @Override
  public CurrencyParameterSensitivities createParameterSensitivity(Currency currency, DoubleArray sensitivities) {
    return CurrencyParameterSensitivities.of(curve.createParameterSensitivity(currency, sensitivities));
  }

  //-------------------------------------------------------------------------
  /**
   * Returns a new instance with a different curve.
   *
   * @param curve  the new curve
   * @return the new instance
   */
  public ZeroRatePeriodicDiscountFactors withCurve(Curve curve) {
    return new ZeroRatePeriodicDiscountFactors(currency, valuationDate, curve);
  }

  //------------------------- AUTOGENERATED START -------------------------
  ///CLOVER:OFF
  /**
   * The meta-bean for {@code ZeroRatePeriodicDiscountFactors}.
   * @return the meta-bean, not null
   */
  public static ZeroRatePeriodicDiscountFactors.Meta meta() {
    return ZeroRatePeriodicDiscountFactors.Meta.INSTANCE;
  }

  static {
    JodaBeanUtils.registerMetaBean(ZeroRatePeriodicDiscountFactors.Meta.INSTANCE);
  }

  /**
   * The serialization version id.
   */
  private static final long serialVersionUID = 1L;

  @Override
  public ZeroRatePeriodicDiscountFactors.Meta metaBean() {
    return ZeroRatePeriodicDiscountFactors.Meta.INSTANCE;
  }

  @Override
  public <R> Property<R> property(String propertyName) {
    return metaBean().<R>metaProperty(propertyName).createProperty(this);
  }

  @Override
  public Set<String> propertyNames() {
    return metaBean().metaPropertyMap().keySet();
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the currency that the discount factors are for.
   * @return the value of the property, not null
   */
  @Override
  public Currency getCurrency() {
    return currency;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the valuation date.
   * @return the value of the property, not null
   */
  @Override
  public LocalDate getValuationDate() {
    return valuationDate;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the underlying curve.
   * The metadata of the curve must define a day count.
   * @return the value of the property, not null
   */
  public Curve getCurve() {
    return curve;
  }

  //-----------------------------------------------------------------------
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      ZeroRatePeriodicDiscountFactors other = (ZeroRatePeriodicDiscountFactors) obj;
      return JodaBeanUtils.equal(currency, other.currency) &&
          JodaBeanUtils.equal(valuationDate, other.valuationDate) &&
          JodaBeanUtils.equal(curve, other.curve);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(currency);
    hash = hash * 31 + JodaBeanUtils.hashCode(valuationDate);
    hash = hash * 31 + JodaBeanUtils.hashCode(curve);
    return hash;
  }

  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(128);
    buf.append("ZeroRatePeriodicDiscountFactors{");
    buf.append("currency").append('=').append(currency).append(',').append(' ');
    buf.append("valuationDate").append('=').append(valuationDate).append(',').append(' ');
    buf.append("curve").append('=').append(JodaBeanUtils.toString(curve));
    buf.append('}');
    return buf.toString();
  }

  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code ZeroRatePeriodicDiscountFactors}.
   */
  public static final class Meta extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();

    /**
     * The meta-property for the {@code currency} property.
     */
    private final MetaProperty<Currency> currency = DirectMetaProperty.ofImmutable(
        this, "currency", ZeroRatePeriodicDiscountFactors.class, Currency.class);
    /**
     * The meta-property for the {@code valuationDate} property.
     */
    private final MetaProperty<LocalDate> valuationDate = DirectMetaProperty.ofImmutable(
        this, "valuationDate", ZeroRatePeriodicDiscountFactors.class, LocalDate.class);
    /**
     * The meta-property for the {@code curve} property.
     */
    private final MetaProperty<Curve> curve = DirectMetaProperty.ofImmutable(
        this, "curve", ZeroRatePeriodicDiscountFactors.class, Curve.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "currency",
        "valuationDate",
        "curve");

    /**
     * Restricted constructor.
     */
    private Meta() {
    }

    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      switch (propertyName.hashCode()) {
        case 575402001:  // currency
          return currency;
        case 113107279:  // valuationDate
          return valuationDate;
        case 95027439:  // curve
          return curve;
      }
      return super.metaPropertyGet(propertyName);
    }

    @Override
    public BeanBuilder<? extends ZeroRatePeriodicDiscountFactors> builder() {
      return new ZeroRatePeriodicDiscountFactors.Builder();
    }

    @Override
    public Class<? extends ZeroRatePeriodicDiscountFactors> beanType() {
      return ZeroRatePeriodicDiscountFactors.class;
    }

    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return metaPropertyMap$;
    }

    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code currency} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Currency> currency() {
      return currency;
    }

    /**
     * The meta-property for the {@code valuationDate} property.
     * @return the meta-property, not null
     */
    public MetaProperty<LocalDate> valuationDate() {
      return valuationDate;
    }

    /**
     * The meta-property for the {@code curve} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Curve> curve() {
      return curve;
    }

    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 575402001:  // currency
          return ((ZeroRatePeriodicDiscountFactors) bean).getCurrency();
        case 113107279:  // valuationDate
          return ((ZeroRatePeriodicDiscountFactors) bean).getValuationDate();
        case 95027439:  // curve
          return ((ZeroRatePeriodicDiscountFactors) bean).getCurve();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }

    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      metaProperty(propertyName);
      if (quiet) {
        return;
      }
      throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
    }

  }

  //-----------------------------------------------------------------------
  /**
   * The bean-builder for {@code ZeroRatePeriodicDiscountFactors}.
   */
  private static final class Builder extends DirectFieldsBeanBuilder<ZeroRatePeriodicDiscountFactors> {

    private Currency currency;
    private LocalDate valuationDate;
    private Curve curve;

    /**
     * Restricted constructor.
     */
    private Builder() {
    }

    //-----------------------------------------------------------------------
    @Override
    public Object get(String propertyName) {
      switch (propertyName.hashCode()) {
        case 575402001:  // currency
          return currency;
        case 113107279:  // valuationDate
          return valuationDate;
        case 95027439:  // curve
          return curve;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
    }

    @Override
    public Builder set(String propertyName, Object newValue) {
      switch (propertyName.hashCode()) {
        case 575402001:  // currency
          this.currency = (Currency) newValue;
          break;
        case 113107279:  // valuationDate
          this.valuationDate = (LocalDate) newValue;
          break;
        case 95027439:  // curve
          this.curve = (Curve) newValue;
          break;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
      return this;
    }

    @Override
    public Builder set(MetaProperty<?> property, Object value) {
      super.set(property, value);
      return this;
    }

    @Override
    public Builder setString(String propertyName, String value) {
      setString(meta().metaProperty(propertyName), value);
      return this;
    }

    @Override
    public Builder setString(MetaProperty<?> property, String value) {
      super.setString(property, value);
      return this;
    }

    @Override
    public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
      super.setAll(propertyValueMap);
      return this;
    }

    @Override
    public ZeroRatePeriodicDiscountFactors build() {
      return new ZeroRatePeriodicDiscountFactors(
          currency,
          valuationDate,
          curve);
    }

    //-----------------------------------------------------------------------
    @Override
    public String toString() {
      StringBuilder buf = new StringBuilder(128);
      buf.append("ZeroRatePeriodicDiscountFactors.Builder{");
      buf.append("currency").append('=').append(JodaBeanUtils.toString(currency)).append(',').append(' ');
      buf.append("valuationDate").append('=').append(JodaBeanUtils.toString(valuationDate)).append(',').append(' ');
      buf.append("curve").append('=').append(JodaBeanUtils.toString(curve));
      buf.append('}');
      return buf.toString();
    }

  }

  ///CLOVER:ON
  //-------------------------- AUTOGENERATED END --------------------------
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.execution;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.ListenableFuture;
import io.trino.Session;
import io.trino.connector.CatalogName;
import io.trino.execution.warnings.WarningCollector;
import io.trino.metadata.Metadata;
import io.trino.metadata.QualifiedObjectName;
import io.trino.metadata.RedirectionAwareTableHandle;
import io.trino.metadata.TableHandle;
import io.trino.metadata.TableMetadata;
import io.trino.security.AccessControl;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.ConnectorTableMetadata;
import io.trino.spi.security.AccessDeniedException;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeNotFoundException;
import io.trino.sql.analyzer.Output;
import io.trino.sql.analyzer.OutputColumn;
import io.trino.sql.tree.ColumnDefinition;
import io.trino.sql.tree.CreateTable;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.LikeClause;
import io.trino.sql.tree.NodeRef;
import io.trino.sql.tree.Parameter;
import io.trino.sql.tree.TableElement;
import io.trino.transaction.TransactionManager;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.util.concurrent.Futures.immediateVoidFuture;
import static io.trino.metadata.MetadataUtil.createQualifiedObjectName;
import static io.trino.metadata.MetadataUtil.getRequiredCatalogHandle;
import static io.trino.spi.StandardErrorCode.ALREADY_EXISTS;
import static io.trino.spi.StandardErrorCode.CATALOG_NOT_FOUND;
import static io.trino.spi.StandardErrorCode.COLUMN_TYPE_UNKNOWN;
import static io.trino.spi.StandardErrorCode.DUPLICATE_COLUMN_NAME;
import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.StandardErrorCode.TABLE_ALREADY_EXISTS;
import static io.trino.spi.StandardErrorCode.TABLE_NOT_FOUND;
import static io.trino.spi.StandardErrorCode.TYPE_NOT_FOUND;
import static io.trino.spi.connector.ConnectorCapabilities.NOT_NULL_COLUMN_CONSTRAINT;
import static io.trino.sql.NodeUtils.mapFromProperties;
import static io.trino.sql.ParameterUtils.parameterExtractor;
import static io.trino.sql.analyzer.SemanticExceptions.semanticException;
import static io.trino.sql.analyzer.TypeSignatureTranslator.toTypeSignature;
import static io.trino.sql.tree.LikeClause.PropertiesOption.EXCLUDING;
import static io.trino.sql.tree.LikeClause.PropertiesOption.INCLUDING;
import static io.trino.type.UnknownType.UNKNOWN;
import static java.lang.String.format;
/**
 * Executes a {@code CREATE TABLE} statement (without AS SELECT): resolves the column
 * and LIKE-clause definitions, performs access-control checks, resolves table and
 * column properties, and registers the table with the target connector.
 */
public class CreateTableTask
        implements DataDefinitionTask<CreateTable>
{
    @Override
    public String getName()
    {
        return "CREATE TABLE";
    }

    @Override
    public String explain(CreateTable statement, List<Expression> parameters)
    {
        return "CREATE TABLE " + statement.getName();
    }

    @Override
    public ListenableFuture<Void> execute(
            CreateTable statement,
            TransactionManager transactionManager,
            Metadata metadata,
            AccessControl accessControl,
            QueryStateMachine stateMachine,
            List<Expression> parameters,
            WarningCollector warningCollector)
    {
        // Delegates to internalExecute, wiring the query output metadata back into the state machine.
        return internalExecute(statement, metadata, accessControl, stateMachine.getSession(), parameters, output -> stateMachine.setOutput(Optional.of(output)));
    }

    /**
     * Performs the actual CREATE TABLE work.
     * <p>
     * Ordering matters here: existence is checked first (honoring IF NOT EXISTS),
     * then each table element is resolved, then access control is checked, and
     * only then is the table created.
     *
     * @param outputConsumer receives the output column metadata of the created table
     */
    @VisibleForTesting
    ListenableFuture<Void> internalExecute(CreateTable statement, Metadata metadata, AccessControl accessControl, Session session, List<Expression> parameters, Consumer<Output> outputConsumer)
    {
        checkArgument(!statement.getElements().isEmpty(), "no columns for table");

        Map<NodeRef<Parameter>, Expression> parameterLookup = parameterExtractor(statement, parameters);
        QualifiedObjectName tableName = createQualifiedObjectName(session, statement, statement.getName());
        Optional<TableHandle> tableHandle = metadata.getTableHandle(session, tableName);
        // If the table already exists, fail unless IF NOT EXISTS was specified.
        if (tableHandle.isPresent()) {
            if (!statement.isNotExists()) {
                throw semanticException(TABLE_ALREADY_EXISTS, statement, "Table '%s' already exists", tableName);
            }
            return immediateVoidFuture();
        }

        CatalogName catalogName = getRequiredCatalogHandle(metadata, session, statement, tableName.getCatalogName());

        // LinkedHashMap preserves the declaration order of columns; keys are lower-cased names.
        LinkedHashMap<String, ColumnMetadata> columns = new LinkedHashMap<>();
        Map<String, Object> inheritedProperties = ImmutableMap.of();
        boolean includingProperties = false;
        for (TableElement element : statement.getElements()) {
            if (element instanceof ColumnDefinition) {
                // Plain column definition: validate type, nullability support and uniqueness.
                ColumnDefinition column = (ColumnDefinition) element;
                String name = column.getName().getValue().toLowerCase(Locale.ENGLISH);
                Type type;
                try {
                    type = metadata.getType(toTypeSignature(column.getType()));
                }
                catch (TypeNotFoundException e) {
                    throw semanticException(TYPE_NOT_FOUND, element, "Unknown type '%s' for column '%s'", column.getType(), column.getName());
                }
                if (type.equals(UNKNOWN)) {
                    throw semanticException(COLUMN_TYPE_UNKNOWN, element, "Unknown type '%s' for column '%s'", column.getType(), column.getName());
                }
                if (columns.containsKey(name)) {
                    throw semanticException(DUPLICATE_COLUMN_NAME, column, "Column name '%s' specified more than once", column.getName());
                }
                // NOT NULL is only honored when the connector declares the capability.
                if (!column.isNullable() && !metadata.getConnectorCapabilities(session, catalogName).contains(NOT_NULL_COLUMN_CONSTRAINT)) {
                    throw semanticException(NOT_SUPPORTED, column, "Catalog '%s' does not support non-null column for column name '%s'", catalogName.getCatalogName(), column.getName());
                }

                // Resolve connector-specific column properties (WITH clause on the column).
                Map<String, Expression> sqlProperties = mapFromProperties(column.getProperties());
                Map<String, Object> columnProperties = metadata.getColumnPropertyManager().getProperties(
                        catalogName,
                        tableName.getCatalogName(),
                        sqlProperties,
                        session,
                        metadata,
                        accessControl,
                        parameterLookup);

                columns.put(name, ColumnMetadata.builder()
                        .setName(name)
                        .setType(type)
                        .setNullable(column.isNullable())
                        .setComment(column.getComment())
                        .setProperties(columnProperties)
                        .build());
            }
            else if (element instanceof LikeClause) {
                // LIKE clause: copy visible columns (and optionally properties) from another table.
                LikeClause likeClause = (LikeClause) element;
                QualifiedObjectName originalLikeTableName = createQualifiedObjectName(session, statement, likeClause.getTableName());
                if (metadata.getCatalogHandle(session, originalLikeTableName.getCatalogName()).isEmpty()) {
                    throw semanticException(CATALOG_NOT_FOUND, statement, "LIKE table catalog '%s' does not exist", originalLikeTableName.getCatalogName());
                }

                // The LIKE table may be redirected by the connector to a different table.
                RedirectionAwareTableHandle redirection = metadata.getRedirectionAwareTableHandle(session, originalLikeTableName);
                TableHandle likeTable = redirection.getTableHandle()
                        .orElseThrow(() -> semanticException(TABLE_NOT_FOUND, statement, "LIKE table '%s' does not exist", originalLikeTableName));

                QualifiedObjectName likeTableName = redirection.getRedirectedTableName().orElse(originalLikeTableName);
                if (!tableName.getCatalogName().equals(likeTableName.getCatalogName())) {
                    String message = "CREATE TABLE LIKE across catalogs is not supported";
                    if (!originalLikeTableName.equals(likeTableName)) {
                        message += format(". LIKE table '%s' redirected to '%s'.", originalLikeTableName, likeTableName);
                    }
                    throw semanticException(NOT_SUPPORTED, statement, message);
                }

                TableMetadata likeTableMetadata = metadata.getTableMetadata(session, likeTable);

                Optional<LikeClause.PropertiesOption> propertiesOption = likeClause.getPropertiesOption();
                // Only a single LIKE clause may contribute INCLUDING PROPERTIES.
                if (propertiesOption.isPresent() && propertiesOption.get() == LikeClause.PropertiesOption.INCLUDING) {
                    if (includingProperties) {
                        throw semanticException(NOT_SUPPORTED, statement, "Only one LIKE clause can specify INCLUDING PROPERTIES");
                    }
                    includingProperties = true;
                    inheritedProperties = likeTableMetadata.getMetadata().getProperties();
                }

                // Re-wrap access-control failures so the error names the LIKE table rather
                // than leaking which specific columns were denied.
                try {
                    accessControl.checkCanSelectFromColumns(
                            session.toSecurityContext(),
                            likeTableName,
                            likeTableMetadata.getColumns().stream()
                                    .map(ColumnMetadata::getName)
                                    .collect(toImmutableSet()));
                }
                catch (AccessDeniedException e) {
                    throw new AccessDeniedException("Cannot reference columns of table " + likeTableName);
                }
                if (propertiesOption.orElse(EXCLUDING) == INCLUDING) {
                    try {
                        accessControl.checkCanShowCreateTable(session.toSecurityContext(), likeTableName);
                    }
                    catch (AccessDeniedException e) {
                        throw new AccessDeniedException("Cannot reference properties of table " + likeTableName);
                    }
                }

                // Hidden columns are not copied; visible columns must not clash with prior definitions.
                likeTableMetadata.getColumns().stream()
                        .filter(column -> !column.isHidden())
                        .forEach(column -> {
                            if (columns.containsKey(column.getName().toLowerCase(Locale.ENGLISH))) {
                                throw semanticException(DUPLICATE_COLUMN_NAME, element, "Column name '%s' specified more than once", column.getName());
                            }
                            columns.put(column.getName().toLowerCase(Locale.ENGLISH), column);
                        });
            }
            else {
                throw new TrinoException(GENERIC_INTERNAL_ERROR, "Invalid TableElement:  " + element.getClass().getName());
            }
        }

        accessControl.checkCanCreateTable(session.toSecurityContext(), tableName);

        // Resolve the table-level WITH properties, then layer them over inherited LIKE properties.
        Map<String, Expression> sqlProperties = mapFromProperties(statement.getProperties());
        Map<String, Object> properties = metadata.getTablePropertyManager().getProperties(
                catalogName,
                tableName.getCatalogName(),
                sqlProperties,
                session,
                metadata,
                accessControl,
                parameterLookup);

        Map<String, Object> finalProperties = combineProperties(sqlProperties.keySet(), properties, inheritedProperties);

        ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(tableName.asSchemaTableName(), ImmutableList.copyOf(columns.values()), finalProperties, statement.getComment());
        try {
            metadata.createTable(session, tableName.getCatalogName(), tableMetadata, statement.isNotExists());
        }
        catch (TrinoException e) {
            // connectors are not required to handle the ignoreExisting flag
            if (!e.getErrorCode().equals(ALREADY_EXISTS.toErrorCode()) || !statement.isNotExists()) {
                throw e;
            }
        }
        outputConsumer.accept(new Output(
                tableName.getCatalogName(),
                tableName.getSchemaName(),
                tableName.getObjectName(),
                Optional.of(tableMetadata.getColumns().stream()
                        .map(column -> new OutputColumn(new Column(column.getName(), column.getType().toString()), ImmutableSet.of()))
                        .collect(toImmutableList()))));
        return immediateVoidFuture();
    }

    /**
     * Merges property maps with the precedence: explicitly specified property >
     * inherited (LIKE ... INCLUDING PROPERTIES) property > connector default.
     */
    private static Map<String, Object> combineProperties(Set<String> specifiedPropertyKeys, Map<String, Object> defaultProperties, Map<String, Object> inheritedProperties)
    {
        Map<String, Object> finalProperties = new HashMap<>(inheritedProperties);
        for (Map.Entry<String, Object> entry : defaultProperties.entrySet()) {
            if (specifiedPropertyKeys.contains(entry.getKey()) || !finalProperties.containsKey(entry.getKey())) {
                finalProperties.put(entry.getKey(), entry.getValue());
            }
        }
        return finalProperties;
    }
}
| |
/*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.junit.jupiter.api.Test;
import org.springframework.beans.propertyeditors.CustomNumberEditor;
import org.springframework.beans.propertyeditors.StringTrimmerEditor;
import org.springframework.beans.testfixture.beans.GenericBean;
import org.springframework.beans.testfixture.beans.GenericIntegerBean;
import org.springframework.beans.testfixture.beans.GenericSetOfIntegerBean;
import org.springframework.beans.testfixture.beans.TestBean;
import org.springframework.core.io.UrlResource;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
/**
 * Unit tests for {@link BeanWrapperImpl}'s handling of generically typed
 * properties: sets, lists, maps, arrays, nested combinations of those, and
 * beans implementing generic interfaces. Each test assigns raw (usually
 * String) values to a property whose declared type carries generic
 * information and verifies that the values were converted to the declared
 * element/key/value types.
 *
 * @author Juergen Hoeller
 * @author Chris Beams
 * @since 18.01.2006
 */
class BeanWrapperGenericsTests {

	// Strings assigned to a Set<Integer> property are converted element-wise.
	@Test
	void testGenericSet() {
		GenericBean<?> gb = new GenericBean<>();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		Set<String> input = new HashSet<>();
		input.add("4");
		input.add("5");
		bw.setPropertyValue("integerSet", input);
		assertThat(gb.getIntegerSet().contains(4)).isTrue();
		assertThat(gb.getIntegerSet().contains(5)).isTrue();
	}

	// A custom Number editor drives conversion for a lower-bounded Set<Number> property.
	@Test
	void testGenericLowerBoundedSet() {
		GenericBean<?> gb = new GenericBean<>();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.registerCustomEditor(Number.class, new CustomNumberEditor(Integer.class, true));
		Set<String> input = new HashSet<>();
		input.add("4");
		input.add("5");
		bw.setPropertyValue("numberSet", input);
		assertThat(gb.getNumberSet().contains(4)).isTrue();
		assertThat(gb.getNumberSet().contains(5)).isTrue();
	}

	// Elements that cannot be converted to the declared element type raise a
	// TypeMismatchException mentioning the target type.
	@Test
	void testGenericSetWithConversionFailure() {
		GenericBean<?> gb = new GenericBean<>();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		Set<TestBean> input = new HashSet<>();
		input.add(new TestBean());
		assertThatExceptionOfType(TypeMismatchException.class).isThrownBy(() ->
				bw.setPropertyValue("integerSet", input))
			.withMessageContaining("java.lang.Integer");
	}

	// String URLs assigned to a List<Resource> property become UrlResource elements.
	@Test
	void testGenericList() throws Exception {
		GenericBean<?> gb = new GenericBean<>();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		List<String> input = new ArrayList<>();
		input.add("http://localhost:8080");
		input.add("http://localhost:9090");
		bw.setPropertyValue("resourceList", input);
		assertThat(gb.getResourceList().get(0)).isEqualTo(new UrlResource("http://localhost:8080"));
		assertThat(gb.getResourceList().get(1)).isEqualTo(new UrlResource("http://localhost:9090"));
	}

	// Indexed list access ("resourceList[0]") also converts the element value.
	@Test
	void testGenericListElement() throws Exception {
		GenericBean<?> gb = new GenericBean<>();
		gb.setResourceList(new ArrayList<>());
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("resourceList[0]", "http://localhost:8080");
		assertThat(gb.getResourceList().get(0)).isEqualTo(new UrlResource("http://localhost:8080"));
	}

	// Both map keys and values are converted to the declared generic types.
	@Test
	void testGenericMap() {
		GenericBean<?> gb = new GenericBean<>();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		Map<String, String> input = new HashMap<>();
		input.put("4", "5");
		input.put("6", "7");
		bw.setPropertyValue("shortMap", input);
		assertThat(gb.getShortMap().get(Short.valueOf("4"))).isEqualTo(5);
		assertThat(gb.getShortMap().get(Short.valueOf("6"))).isEqualTo(7);
	}

	@Test
	void testGenericMapElement() {
		GenericBean<?> gb = new GenericBean<>();
		gb.setShortMap(new HashMap<>());
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("shortMap[4]", "5");
		assertThat(bw.getPropertyValue("shortMap[4]")).isEqualTo(5);
		assertThat(gb.getShortMap().get(Short.valueOf("4"))).isEqualTo(5);
	}

	@Test
	void testGenericMapWithKeyType() {
		GenericBean<?> gb = new GenericBean<>();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		Map<String, String> input = new HashMap<>();
		input.put("4", "5");
		input.put("6", "7");
		bw.setPropertyValue("longMap", input);
		assertThat(gb.getLongMap().get(4L)).isEqualTo("5");
		assertThat(gb.getLongMap().get(6L)).isEqualTo("7");
	}

	// Note: the key "4" is converted to Long, but the asserted value stays the
	// String "5" here — only the key type triggers conversion in this scenario.
	@Test
	void testGenericMapElementWithKeyType() {
		GenericBean<?> gb = new GenericBean<>();
		gb.setLongMap(new HashMap<Long, Integer>());
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("longMap[4]", "5");
		assertThat(gb.getLongMap().get(Long.valueOf("4"))).isEqualTo("5");
		assertThat(bw.getPropertyValue("longMap[4]")).isEqualTo("5");
	}

	// Collection-typed map values keep their concrete collection implementations.
	@Test
	void testGenericMapWithCollectionValue() {
		GenericBean<?> gb = new GenericBean<>();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.registerCustomEditor(Number.class, new CustomNumberEditor(Integer.class, false));
		Map<String, Collection<?>> input = new HashMap<>();
		HashSet<Integer> value1 = new HashSet<>();
		value1.add(1);
		input.put("1", value1);
		ArrayList<Boolean> value2 = new ArrayList<>();
		value2.add(Boolean.TRUE);
		input.put("2", value2);
		bw.setPropertyValue("collectionMap", input);
		assertThat(gb.getCollectionMap().get(1) instanceof HashSet).isTrue();
		assertThat(gb.getCollectionMap().get(2) instanceof ArrayList).isTrue();
	}

	@Test
	void testGenericMapElementWithCollectionValue() {
		GenericBean<?> gb = new GenericBean<>();
		gb.setCollectionMap(new HashMap<>());
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.registerCustomEditor(Number.class, new CustomNumberEditor(Integer.class, false));
		HashSet<Integer> value1 = new HashSet<>();
		value1.add(1);
		bw.setPropertyValue("collectionMap[1]", value1);
		assertThat(gb.getCollectionMap().get(1) instanceof HashSet).isTrue();
	}

	// A java.util.Properties instance is accepted as input for a generic Map property.
	@Test
	void testGenericMapFromProperties() {
		GenericBean<?> gb = new GenericBean<>();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		Properties input = new Properties();
		input.setProperty("4", "5");
		input.setProperty("6", "7");
		bw.setPropertyValue("shortMap", input);
		assertThat(gb.getShortMap().get(Short.valueOf("4"))).isEqualTo(5);
		assertThat(gb.getShortMap().get(Short.valueOf("6"))).isEqualTo(7);
	}

	// Nested index paths ("listOfLists[0][0]") navigate and convert through
	// multiple generic levels.
	@Test
	void testGenericListOfLists() {
		GenericBean<String> gb = new GenericBean<>();
		List<List<Integer>> list = new ArrayList<>();
		list.add(new ArrayList<>());
		gb.setListOfLists(list);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("listOfLists[0][0]", 5);
		assertThat(bw.getPropertyValue("listOfLists[0][0]")).isEqualTo(5);
		assertThat(gb.getListOfLists().get(0).get(0)).isEqualTo(5);
	}

	@Test
	void testGenericListOfListsWithElementConversion() {
		GenericBean<String> gb = new GenericBean<>();
		List<List<Integer>> list = new ArrayList<>();
		list.add(new ArrayList<>());
		gb.setListOfLists(list);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("listOfLists[0][0]", "5");
		assertThat(bw.getPropertyValue("listOfLists[0][0]")).isEqualTo(5);
		assertThat(gb.getListOfLists().get(0).get(0)).isEqualTo(5);
	}

	// Without a trimmer editor the trailing space in "str3 " is preserved
	// (contrast with the WithElementConversion variant below).
	@Test
	void testGenericListOfArrays() {
		GenericBean<String> gb = new GenericBean<>();
		ArrayList<String[]> list = new ArrayList<>();
		list.add(new String[] {"str1", "str2"});
		gb.setListOfArrays(list);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("listOfArrays[0][1]", "str3 ");
		assertThat(bw.getPropertyValue("listOfArrays[0][1]")).isEqualTo("str3 ");
		assertThat(gb.getListOfArrays().get(0)[1]).isEqualTo("str3 ");
	}

	@Test
	void testGenericListOfArraysWithElementConversion() {
		GenericBean<String> gb = new GenericBean<>();
		ArrayList<String[]> list = new ArrayList<>();
		list.add(new String[] {"str1", "str2"});
		gb.setListOfArrays(list);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.registerCustomEditor(String.class, new StringTrimmerEditor(false));
		bw.setPropertyValue("listOfArrays[0][1]", "str3 ");
		assertThat(bw.getPropertyValue("listOfArrays[0][1]")).isEqualTo("str3");
		assertThat(gb.getListOfArrays().get(0)[1]).isEqualTo("str3");
	}

	@Test
	void testGenericListOfMaps() {
		GenericBean<String> gb = new GenericBean<>();
		List<Map<Integer, Long>> list = new ArrayList<>();
		list.add(new HashMap<>());
		gb.setListOfMaps(list);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("listOfMaps[0][10]", 5L);
		assertThat(bw.getPropertyValue("listOfMaps[0][10]")).isEqualTo(5L);
		assertThat(gb.getListOfMaps().get(0).get(10)).isEqualTo(Long.valueOf(5));
	}

	@Test
	void testGenericListOfMapsWithElementConversion() {
		GenericBean<String> gb = new GenericBean<>();
		List<Map<Integer, Long>> list = new ArrayList<>();
		list.add(new HashMap<>());
		gb.setListOfMaps(list);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("listOfMaps[0][10]", "5");
		assertThat(bw.getPropertyValue("listOfMaps[0][10]")).isEqualTo(5L);
		assertThat(gb.getListOfMaps().get(0).get(10)).isEqualTo(Long.valueOf(5));
	}

	@Test
	void testGenericMapOfMaps() {
		GenericBean<String> gb = new GenericBean<>();
		Map<String, Map<Integer, Long>> map = new HashMap<>();
		map.put("mykey", new HashMap<>());
		gb.setMapOfMaps(map);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("mapOfMaps[mykey][10]", 5L);
		assertThat(bw.getPropertyValue("mapOfMaps[mykey][10]")).isEqualTo(5L);
		assertThat(gb.getMapOfMaps().get("mykey").get(10)).isEqualTo(Long.valueOf(5));
	}

	@Test
	void testGenericMapOfMapsWithElementConversion() {
		GenericBean<String> gb = new GenericBean<>();
		Map<String, Map<Integer, Long>> map = new HashMap<>();
		map.put("mykey", new HashMap<>());
		gb.setMapOfMaps(map);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("mapOfMaps[mykey][10]", "5");
		assertThat(bw.getPropertyValue("mapOfMaps[mykey][10]")).isEqualTo(Long.valueOf(5));
		assertThat(gb.getMapOfMaps().get("mykey").get(10)).isEqualTo(Long.valueOf(5));
	}

	@Test
	void testGenericMapOfLists() {
		GenericBean<String> gb = new GenericBean<>();
		Map<Integer, List<Integer>> map = new HashMap<>();
		map.put(1, new ArrayList<>());
		gb.setMapOfLists(map);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("mapOfLists[1][0]", 5);
		assertThat(bw.getPropertyValue("mapOfLists[1][0]")).isEqualTo(5);
		assertThat(gb.getMapOfLists().get(1).get(0)).isEqualTo(5);
	}

	@Test
	void testGenericMapOfListsWithElementConversion() {
		GenericBean<String> gb = new GenericBean<>();
		Map<Integer, List<Integer>> map = new HashMap<>();
		map.put(1, new ArrayList<>());
		gb.setMapOfLists(map);
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("mapOfLists[1][0]", "5");
		assertThat(bw.getPropertyValue("mapOfLists[1][0]")).isEqualTo(5);
		assertThat(gb.getMapOfLists().get(1).get(0)).isEqualTo(5);
	}

	// The "TypeNesting" tests exercise generic type resolution through the
	// NestedGenericCollectionBean hierarchy declared at the bottom of this class.
	@Test
	void testGenericTypeNestingMapOfInteger() {
		Map<String, String> map = new HashMap<>();
		map.put("testKey", "100");
		NestedGenericCollectionBean gb = new NestedGenericCollectionBean();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("mapOfInteger", map);
		Object obj = gb.getMapOfInteger().get("testKey");
		assertThat(obj instanceof Integer).isTrue();
	}

	@Test
	void testGenericTypeNestingMapOfListOfInteger() {
		Map<String, List<String>> map = new HashMap<>();
		List<String> list = Arrays.asList("1", "2", "3");
		map.put("testKey", list);
		NestedGenericCollectionBean gb = new NestedGenericCollectionBean();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("mapOfListOfInteger", map);
		Object obj = gb.getMapOfListOfInteger().get("testKey").get(0);
		assertThat(obj instanceof Integer).isTrue();
		assertThat(((Integer) obj).intValue()).isEqualTo(1);
	}

	@Test
	void testGenericTypeNestingListOfMapOfInteger() {
		List<Map<String, String>> list = new ArrayList<>();
		Map<String, String> map = new HashMap<>();
		map.put("testKey", "5");
		list.add(map);
		NestedGenericCollectionBean gb = new NestedGenericCollectionBean();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("listOfMapOfInteger", list);
		Object obj = gb.getListOfMapOfInteger().get(0).get("testKey");
		assertThat(obj instanceof Integer).isTrue();
		assertThat(((Integer) obj).intValue()).isEqualTo(5);
	}

	@Test
	void testGenericTypeNestingMapOfListOfListOfInteger() {
		Map<String, List<List<String>>> map = new HashMap<>();
		List<String> list = Arrays.asList("1", "2", "3");
		map.put("testKey", Collections.singletonList(list));
		NestedGenericCollectionBean gb = new NestedGenericCollectionBean();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("mapOfListOfListOfInteger", map);
		Object obj = gb.getMapOfListOfListOfInteger().get("testKey").get(0).get(0);
		assertThat(obj instanceof Integer).isTrue();
		assertThat(((Integer) obj).intValue()).isEqualTo(1);
	}

	// Generic map keys that are themselves collections are converted element-wise.
	@Test
	void testComplexGenericMap() {
		Map<List<String>, List<String>> inputMap = new HashMap<>();
		List<String> inputKey = new ArrayList<>();
		inputKey.add("1");
		List<String> inputValue = new ArrayList<>();
		inputValue.add("10");
		inputMap.put(inputKey, inputValue);
		ComplexMapHolder holder = new ComplexMapHolder();
		BeanWrapper bw = new BeanWrapperImpl(holder);
		bw.setPropertyValue("genericMap", inputMap);
		assertThat(holder.getGenericMap().keySet().iterator().next().get(0)).isEqualTo(1);
		assertThat(holder.getGenericMap().values().iterator().next().get(0)).isEqualTo(Long.valueOf(10));
	}

	// Sets as input are converted to the declared List key/value types.
	@Test
	void testComplexGenericMapWithCollectionConversion() {
		Map<Set<String>, Set<String>> inputMap = new HashMap<>();
		Set<String> inputKey = new HashSet<>();
		inputKey.add("1");
		Set<String> inputValue = new HashSet<>();
		inputValue.add("10");
		inputMap.put(inputKey, inputValue);
		ComplexMapHolder holder = new ComplexMapHolder();
		BeanWrapper bw = new BeanWrapperImpl(holder);
		bw.setPropertyValue("genericMap", inputMap);
		assertThat(holder.getGenericMap().keySet().iterator().next().get(0)).isEqualTo(1);
		assertThat(holder.getGenericMap().values().iterator().next().get(0)).isEqualTo(Long.valueOf(10));
	}

	@Test
	void testComplexGenericIndexedMapEntry() {
		List<String> inputValue = new ArrayList<>();
		inputValue.add("10");
		ComplexMapHolder holder = new ComplexMapHolder();
		BeanWrapper bw = new BeanWrapperImpl(holder);
		bw.setPropertyValue("genericIndexedMap[1]", inputValue);
		assertThat(holder.getGenericIndexedMap().keySet().iterator().next()).isEqualTo(1);
		assertThat(holder.getGenericIndexedMap().values().iterator().next().get(0)).isEqualTo(Long.valueOf(10));
	}

	@Test
	void testComplexGenericIndexedMapEntryWithCollectionConversion() {
		Set<String> inputValue = new HashSet<>();
		inputValue.add("10");
		ComplexMapHolder holder = new ComplexMapHolder();
		BeanWrapper bw = new BeanWrapperImpl(holder);
		bw.setPropertyValue("genericIndexedMap[1]", inputValue);
		assertThat(holder.getGenericIndexedMap().keySet().iterator().next()).isEqualTo(1);
		assertThat(holder.getGenericIndexedMap().values().iterator().next().get(0)).isEqualTo(Long.valueOf(10));
	}

	// Same as the "GenericIndexedMap" tests, but through a subclass of HashMap
	// (DerivedMap) whose generic parameters are fixed by inheritance.
	@Test
	void testComplexDerivedIndexedMapEntry() {
		List<String> inputValue = new ArrayList<>();
		inputValue.add("10");
		ComplexMapHolder holder = new ComplexMapHolder();
		BeanWrapper bw = new BeanWrapperImpl(holder);
		bw.setPropertyValue("derivedIndexedMap[1]", inputValue);
		assertThat(holder.getDerivedIndexedMap().keySet().iterator().next()).isEqualTo(1);
		assertThat(holder.getDerivedIndexedMap().values().iterator().next().get(0)).isEqualTo(Long.valueOf(10));
	}

	@Test
	void testComplexDerivedIndexedMapEntryWithCollectionConversion() {
		Set<String> inputValue = new HashSet<>();
		inputValue.add("10");
		ComplexMapHolder holder = new ComplexMapHolder();
		BeanWrapper bw = new BeanWrapperImpl(holder);
		bw.setPropertyValue("derivedIndexedMap[1]", inputValue);
		assertThat(holder.getDerivedIndexedMap().keySet().iterator().next()).isEqualTo(1);
		assertThat(holder.getDerivedIndexedMap().values().iterator().next().get(0)).isEqualTo(Long.valueOf(10));
	}

	// A bean extending GenericBean<Integer> resolves the type variable to Integer.
	@Test
	void testGenericallyTypedIntegerBean() {
		GenericIntegerBean gb = new GenericIntegerBean();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("genericProperty", "10");
		bw.setPropertyValue("genericListProperty", new String[] {"20", "30"});
		assertThat(gb.getGenericProperty()).isEqualTo(10);
		assertThat(gb.getGenericListProperty().get(0)).isEqualTo(20);
		assertThat(gb.getGenericListProperty().get(1)).isEqualTo(30);
	}

	@Test
	void testGenericallyTypedSetOfIntegerBean() {
		GenericSetOfIntegerBean gb = new GenericSetOfIntegerBean();
		BeanWrapper bw = new BeanWrapperImpl(gb);
		bw.setPropertyValue("genericProperty", "10");
		bw.setPropertyValue("genericListProperty", new String[] {"20", "30"});
		assertThat(gb.getGenericProperty().iterator().next()).isEqualTo(10);
		assertThat(gb.getGenericListProperty().get(0).iterator().next()).isEqualTo(20);
		assertThat(gb.getGenericListProperty().get(1).iterator().next()).isEqualTo(30);
	}

	// The write type comes from Bar's setter (Double), even though the Foo
	// interface only declares a read-only Number getter.
	@Test
	void testSettingGenericPropertyWithReadOnlyInterface() {
		Bar bar = new Bar();
		BeanWrapper bw = new BeanWrapperImpl(bar);
		bw.setPropertyValue("version", "10");
		assertThat(bar.getVersion()).isEqualTo(Double.valueOf(10.0));
	}

	// The type variable of ObjectWithId<T> is resolved to Long via Promotion.
	@Test
	void testSettingLongPropertyWithGenericInterface() {
		Promotion bean = new Promotion();
		BeanWrapper bw = new BeanWrapperImpl(bean);
		bw.setPropertyValue("id", "10");
		assertThat(bean.getId()).isEqualTo(Long.valueOf(10));
	}

	// When the declared type is an unresolved type variable, the runtime Map
	// value is still navigable with keyed property paths.
	@Test
	void testUntypedPropertyWithMapAtRuntime() {
		class Holder<D> {
			private final D data;
			public Holder(D data) {
				this.data = data;
			}
			@SuppressWarnings("unused")
			public D getData() {
				return this.data;
			}
		}
		Map<String, Object> data = new HashMap<>();
		data.put("x", "y");
		Holder<Map<String, Object>> context = new Holder<>(data);
		BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(context);
		assertThat(bw.getPropertyValue("data['x']")).isEqualTo("y");
		bw.setPropertyValue("data['message']", "it works!");
		assertThat(data.get("message")).isEqualTo("it works!");
	}


	// ---------------------------------------------------------------------
	// Fixture types used by the tests above.
	// ---------------------------------------------------------------------

	// Base class whose abstract signatures force generic-type resolution
	// through an inheritance hierarchy (getMapOfInteger is declared as Object).
	private static abstract class BaseGenericCollectionBean {
		public abstract Object getMapOfInteger();
		public abstract Map<String, List<Integer>> getMapOfListOfInteger();
		public abstract void setMapOfListOfInteger(Map<String, List<Integer>> mapOfListOfInteger);
	}

	@SuppressWarnings("unused")
	private static class NestedGenericCollectionBean extends BaseGenericCollectionBean {
		private Map<String, Integer> mapOfInteger;
		private Map<String, List<Integer>> mapOfListOfInteger;
		private List<Map<String, Integer>> listOfMapOfInteger;
		private Map<String, List<List<Integer>>> mapOfListOfListOfInteger;
		@Override
		public Map<String, Integer> getMapOfInteger() {
			return mapOfInteger;
		}
		public void setMapOfInteger(Map<String, Integer> mapOfInteger) {
			this.mapOfInteger = mapOfInteger;
		}
		@Override
		public Map<String, List<Integer>> getMapOfListOfInteger() {
			return mapOfListOfInteger;
		}
		@Override
		public void setMapOfListOfInteger(Map<String, List<Integer>> mapOfListOfInteger) {
			this.mapOfListOfInteger = mapOfListOfInteger;
		}
		public List<Map<String, Integer>> getListOfMapOfInteger() {
			return listOfMapOfInteger;
		}
		public void setListOfMapOfInteger(List<Map<String, Integer>> listOfMapOfInteger) {
			this.listOfMapOfInteger = listOfMapOfInteger;
		}
		public Map<String, List<List<Integer>>> getMapOfListOfListOfInteger() {
			return mapOfListOfListOfInteger;
		}
		public void setMapOfListOfListOfInteger(Map<String, List<List<Integer>>> mapOfListOfListOfInteger) {
			this.mapOfListOfListOfInteger = mapOfListOfListOfInteger;
		}
	}

	// Holder with maps whose keys/values are themselves generic collections.
	@SuppressWarnings("unused")
	private static class ComplexMapHolder {
		private Map<List<Integer>, List<Long>> genericMap;
		private Map<Integer, List<Long>> genericIndexedMap = new HashMap<>();
		private DerivedMap derivedIndexedMap = new DerivedMap();
		public void setGenericMap(Map<List<Integer>, List<Long>> genericMap) {
			this.genericMap = genericMap;
		}
		public Map<List<Integer>, List<Long>> getGenericMap() {
			return genericMap;
		}
		public void setGenericIndexedMap(Map<Integer, List<Long>> genericIndexedMap) {
			this.genericIndexedMap = genericIndexedMap;
		}
		public Map<Integer, List<Long>> getGenericIndexedMap() {
			return genericIndexedMap;
		}
		public void setDerivedIndexedMap(DerivedMap derivedIndexedMap) {
			this.derivedIndexedMap = derivedIndexedMap;
		}
		public DerivedMap getDerivedIndexedMap() {
			return derivedIndexedMap;
		}
	}

	// Concrete subclass fixing HashMap's type parameters by inheritance.
	@SuppressWarnings("serial")
	private static class DerivedMap extends HashMap<Integer, List<Long>> {
	}

	// Read-only interface with a covariantly overridden getter in Bar.
	public interface Foo {
		Number getVersion();
	}

	public class Bar implements Foo {
		private double version;
		@Override
		public Double getVersion() {
			return this.version;
		}
		public void setVersion(Double theDouble) {
			this.version = theDouble;
		}
	}

	// Generic interface whose type variable is resolved to Long by Promotion.
	public interface ObjectWithId<T extends Comparable<T>> {
		T getId();
		void setId(T aId);
	}

	public class Promotion implements ObjectWithId<Long> {
		private Long id;
		@Override
		public Long getId() {
			return id;
		}
		@Override
		public void setId(Long aId) {
			this.id = aId;
		}
	}

}
| |
package com.flying.xiao.widget;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.Typeface;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.RoundRectShape;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.ViewParent;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.view.animation.DecelerateInterpolator;
import android.widget.FrameLayout;
import android.widget.TabWidget;
import android.widget.TextView;
/**
 * A simple text label view that can be applied as a "badge" to any given {@link android.view.View}.
 * This class is intended to be instantiated at runtime rather than included in XML layouts.
 *
 * <p>When constructed with a target, the badge re-parents the target inside a
 * {@link android.widget.FrameLayout} and overlays itself at the configured position.
 *
 * @author Jeff Gilfelt
 */
public class BadgeView extends TextView {

    public static final int POSITION_TOP_LEFT = 1;
    public static final int POSITION_TOP_RIGHT = 2;
    public static final int POSITION_BOTTOM_LEFT = 3;
    public static final int POSITION_BOTTOM_RIGHT = 4;
    public static final int POSITION_CENTER = 5;

    private static final int DEFAULT_MARGIN_DIP = 5;
    private static final int DEFAULT_LR_PADDING_DIP = 5;
    private static final int DEFAULT_CORNER_RADIUS_DIP = 8;
    private static final int DEFAULT_POSITION = POSITION_TOP_RIGHT;
    private static final int DEFAULT_BADGE_COLOR = Color.parseColor("#CCFF0000"); //Color.RED;
    private static final int DEFAULT_TEXT_COLOR = Color.WHITE;

    // FIX: these were previously declared static even though init() creates a
    // fresh pair for every instance — all badges would share (and each new
    // badge clobber) the same Animation objects. They are per-instance state.
    private Animation fadeIn;
    private Animation fadeOut;

    private Context context;
    private View target;
    private int badgePosition;
    private int badgeMarginH;
    private int badgeMarginV;
    private int badgeColor;
    // Tracked manually; intentionally shadows View#isShown() via the override below.
    private boolean isShown;
    private ShapeDrawable badgeBg;
    private int targetTabIndex;

    public BadgeView(Context context) {
        this(context, (AttributeSet) null, android.R.attr.textViewStyle);
    }

    public BadgeView(Context context, AttributeSet attrs) {
        this(context, attrs, android.R.attr.textViewStyle);
    }

    /**
     * Constructor -
     *
     * create a new BadgeView instance attached to a target {@link android.view.View}.
     *
     * @param context context for this view.
     * @param target the View to attach the badge to.
     */
    public BadgeView(Context context, View target) {
        this(context, null, android.R.attr.textViewStyle, target, 0);
    }

    /**
     * Constructor -
     *
     * create a new BadgeView instance attached to a target {@link android.widget.TabWidget}
     * tab at a given index.
     *
     * @param context context for this view.
     * @param target the TabWidget to attach the badge to.
     * @param index the position of the tab within the target.
     */
    public BadgeView(Context context, TabWidget target, int index) {
        this(context, null, android.R.attr.textViewStyle, target, index);
    }

    public BadgeView(Context context, AttributeSet attrs, int defStyle) {
        this(context, attrs, defStyle, null, 0);
    }

    public BadgeView(Context context, AttributeSet attrs, int defStyle, View target, int tabIndex) {
        super(context, attrs, defStyle);
        init(context, target, tabIndex);
    }

    /**
     * Applies default styling/animation state and, when a target is given,
     * immediately attaches the badge to it (initially hidden); with no target
     * the badge is shown standalone.
     */
    private void init(Context context, View target, int tabIndex) {
        this.context = context;
        this.target = target;
        this.targetTabIndex = tabIndex;

        // apply defaults
        badgePosition = DEFAULT_POSITION;
        badgeMarginH = dipToPixels(DEFAULT_MARGIN_DIP);
        badgeMarginV = badgeMarginH;
        badgeColor = DEFAULT_BADGE_COLOR;

        setTypeface(Typeface.DEFAULT_BOLD);
        int paddingPixels = dipToPixels(DEFAULT_LR_PADDING_DIP);
        setPadding(paddingPixels, 0, paddingPixels, 0);
        setTextColor(DEFAULT_TEXT_COLOR);

        fadeIn = new AlphaAnimation(0, 1);
        fadeIn.setInterpolator(new DecelerateInterpolator());
        fadeIn.setDuration(200);

        fadeOut = new AlphaAnimation(1, 0);
        fadeOut.setInterpolator(new AccelerateInterpolator());
        fadeOut.setDuration(200);

        isShown = false;

        if (this.target != null) {
            applyTo(this.target);
        } else {
            show();
        }
    }

    /**
     * Re-parents the target under a new FrameLayout and adds this badge on top
     * of it. For a TabWidget, the badge container is added inside the tab's
     * child view instead of replacing the target in its parent.
     */
    private void applyTo(View target) {
        LayoutParams lp = target.getLayoutParams();
        ViewParent parent = target.getParent();
        FrameLayout container = new FrameLayout(context);

        if (target instanceof TabWidget) {
            // set target to the relevant tab child container
            target = ((TabWidget) target).getChildTabViewAt(targetTabIndex);
            this.target = target;
            ((ViewGroup) target).addView(container,
                    new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
            this.setVisibility(View.GONE);
            container.addView(this);
        } else {
            // TODO verify that parent is indeed a ViewGroup
            ViewGroup group = (ViewGroup) parent;
            int index = group.indexOfChild(target);
            // Swap the target for the container at the same child index and
            // with the target's original layout params, then nest both the
            // target and this (hidden) badge inside the container.
            group.removeView(target);
            group.addView(container, index, lp);
            container.addView(target);
            this.setVisibility(View.GONE);
            container.addView(this);
            group.invalidate();
        }
    }

    /**
     * Make the badge visible in the UI.
     *
     */
    public void show() {
        show(false, null);
    }

    /**
     * Make the badge visible in the UI.
     *
     * @param animate flag to apply the default fade-in animation.
     */
    public void show(boolean animate) {
        show(animate, fadeIn);
    }

    /**
     * Make the badge visible in the UI.
     *
     * @param anim Animation to apply to the view when made visible; must not be null.
     */
    public void show(Animation anim) {
        show(true, anim);
    }

    /**
     * Make the badge non-visible in the UI.
     *
     */
    public void hide() {
        hide(false, null);
    }

    /**
     * Make the badge non-visible in the UI.
     *
     * @param animate flag to apply the default fade-out animation.
     */
    public void hide(boolean animate) {
        hide(animate, fadeOut);
    }

    /**
     * Make the badge non-visible in the UI.
     *
     * @param anim Animation to apply to the view when made non-visible; must not be null.
     */
    public void hide(Animation anim) {
        hide(true, anim);
    }

    /**
     * Toggle the badge visibility in the UI.
     *
     */
    public void toggle() {
        toggle(false, null, null);
    }

    /**
     * Toggle the badge visibility in the UI.
     *
     * @param animate flag to apply the default fade-in/out animation.
     */
    public void toggle(boolean animate) {
        toggle(animate, fadeIn, fadeOut);
    }

    /**
     * Toggle the badge visibility in the UI.
     *
     * @param animIn Animation to apply to the view when made visible.
     * @param animOut Animation to apply to the view when made non-visible.
     */
    public void toggle(Animation animIn, Animation animOut) {
        toggle(true, animIn, animOut);
    }

    private void show(boolean animate, Animation anim) {
        if (getBackground() == null) {
            if (badgeBg == null) {
                badgeBg = getDefaultBackground();
            }
            // Deprecated but retained for compatibility with old API levels.
            setBackgroundDrawable(badgeBg);
        }
        applyLayoutParams();

        if (animate) {
            this.startAnimation(anim);
        }
        this.setVisibility(View.VISIBLE);
        isShown = true;
    }

    private void hide(boolean animate, Animation anim) {
        // NOTE: visibility is set to GONE before the animation starts; this
        // ordering is preserved from the original implementation.
        this.setVisibility(View.GONE);
        if (animate) {
            this.startAnimation(anim);
        }
        isShown = false;
    }

    private void toggle(boolean animate, Animation animIn, Animation animOut) {
        if (isShown) {
            hide(animate && (animOut != null), animOut);
        } else {
            show(animate && (animIn != null), animIn);
        }
    }

    /**
     * Increment the numeric badge label. If the current badge label cannot be converted to
     * an integer value, it is treated as 0 before the offset is applied.
     *
     * @param offset the increment offset.
     * @return the new label value.
     */
    public int increment(int offset) {
        CharSequence txt = getText();
        int i;
        if (txt != null) {
            try {
                i = Integer.parseInt(txt.toString());
            } catch (NumberFormatException e) {
                // Non-numeric label: restart the count from zero.
                i = 0;
            }
        } else {
            i = 0;
        }
        i = i + offset;
        setText(String.valueOf(i));
        return i;
    }

    /**
     * Decrement the numeric badge label. If the current badge label cannot be converted to
     * an integer value, it is treated as 0 before the offset is applied.
     *
     * @param offset the decrement offset.
     * @return the new label value.
     */
    public int decrement(int offset) {
        return increment(-offset);
    }

    /** Builds the default rounded-rectangle background in the current badge color. */
    private ShapeDrawable getDefaultBackground() {
        int r = dipToPixels(DEFAULT_CORNER_RADIUS_DIP);
        float[] outerR = new float[] {r, r, r, r, r, r, r, r};
        RoundRectShape rr = new RoundRectShape(outerR, null, null);
        ShapeDrawable drawable = new ShapeDrawable(rr);
        drawable.getPaint().setColor(badgeColor);
        return drawable;
    }

    /** Positions the badge inside its FrameLayout container per badgePosition. */
    private void applyLayoutParams() {
        FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        switch (badgePosition) {
            case POSITION_TOP_LEFT:
                lp.gravity = Gravity.LEFT | Gravity.TOP;
                lp.setMargins(badgeMarginH, badgeMarginV, 0, 0);
                break;
            case POSITION_TOP_RIGHT:
                lp.gravity = Gravity.RIGHT | Gravity.TOP;
                lp.setMargins(0, badgeMarginV, badgeMarginH, 0);
                break;
            case POSITION_BOTTOM_LEFT:
                lp.gravity = Gravity.LEFT | Gravity.BOTTOM;
                lp.setMargins(badgeMarginH, 0, 0, badgeMarginV);
                break;
            case POSITION_BOTTOM_RIGHT:
                lp.gravity = Gravity.RIGHT | Gravity.BOTTOM;
                lp.setMargins(0, 0, badgeMarginH, badgeMarginV);
                break;
            case POSITION_CENTER:
                lp.gravity = Gravity.CENTER;
                lp.setMargins(0, 0, 0, 0);
                break;
            default:
                break;
        }
        setLayoutParams(lp);
    }

    /**
     * Returns the target View this badge has been attached to.
     *
     */
    public View getTarget() {
        return target;
    }

    /**
     * Is this badge currently visible in the UI?
     *
     */
    @Override
    public boolean isShown() {
        return isShown;
    }

    /**
     * Returns the positioning of this badge.
     *
     * one of POSITION_TOP_LEFT, POSITION_TOP_RIGHT, POSITION_BOTTOM_LEFT, POSITION_BOTTOM_RIGHT, POSITION_CENTER.
     *
     */
    public int getBadgePosition() {
        return badgePosition;
    }

    /**
     * Set the positioning of this badge.
     *
     * @param layoutPosition one of POSITION_TOP_LEFT, POSITION_TOP_RIGHT, POSITION_BOTTOM_LEFT, POSITION_BOTTOM_RIGHT, POSITION_CENTER.
     *
     */
    public void setBadgePosition(int layoutPosition) {
        this.badgePosition = layoutPosition;
    }

    /**
     * Returns the horizontal margin from the target View that is applied to this badge.
     *
     */
    public int getHorizontalBadgeMargin() {
        return badgeMarginH;
    }

    /**
     * Returns the vertical margin from the target View that is applied to this badge.
     *
     */
    public int getVerticalBadgeMargin() {
        return badgeMarginV;
    }

    /**
     * Set the horizontal/vertical margin from the target View that is applied to this badge.
     *
     * @param badgeMargin the margin in pixels.
     */
    public void setBadgeMargin(int badgeMargin) {
        this.badgeMarginH = badgeMargin;
        this.badgeMarginV = badgeMargin;
    }

    /**
     * Set the horizontal/vertical margin from the target View that is applied to this badge.
     *
     * @param horizontal margin in pixels.
     * @param vertical margin in pixels.
     */
    public void setBadgeMargin(int horizontal, int vertical) {
        this.badgeMarginH = horizontal;
        this.badgeMarginV = vertical;
    }

    /**
     * Returns the color value of the badge background.
     *
     */
    public int getBadgeBackgroundColor() {
        return badgeColor;
    }

    /**
     * Set the color value of the badge background.
     *
     * @param badgeColor the badge background color.
     */
    public void setBadgeBackgroundColor(int badgeColor) {
        this.badgeColor = badgeColor;
        badgeBg = getDefaultBackground();
    }

    /** Converts a dip value to raw pixels using the current display metrics. */
    private int dipToPixels(int dip) {
        Resources r = getResources();
        float px = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dip, r.getDisplayMetrics());
        return (int) px;
    }

}
| |
package cz.metacentrum.perun.webgui.json.columnProviders;
import com.google.gwt.cell.client.FieldUpdater;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.view.client.ListDataProvider;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.UiElements;
import cz.metacentrum.perun.webgui.json.JsonCallbackEvents;
import cz.metacentrum.perun.webgui.json.JsonCallbackTable;
import cz.metacentrum.perun.webgui.json.JsonUtils;
import cz.metacentrum.perun.webgui.json.comparators.GeneralComparator;
import cz.metacentrum.perun.webgui.json.comparators.RichMemberComparator;
import cz.metacentrum.perun.webgui.json.membersManager.SetStatus;
import cz.metacentrum.perun.webgui.model.GeneralObject;
import cz.metacentrum.perun.webgui.model.Member;
import cz.metacentrum.perun.webgui.model.PerunError;
import cz.metacentrum.perun.webgui.model.RichMember;
import cz.metacentrum.perun.webgui.tabs.MembersTabs;
import cz.metacentrum.perun.webgui.tabs.UrlMapper;
import cz.metacentrum.perun.webgui.tabs.memberstabs.ChangeGroupStatusTabItem;
import cz.metacentrum.perun.webgui.tabs.memberstabs.ChangeStatusTabItem;
import cz.metacentrum.perun.webgui.tabs.memberstabs.MemberDetailTabItem;
import cz.metacentrum.perun.webgui.widgets.Confirm;
import cz.metacentrum.perun.webgui.widgets.PerunTable;
import cz.metacentrum.perun.webgui.widgets.cells.CustomClickableTextCellWithAuthz;
import cz.metacentrum.perun.webgui.widgets.cells.PerunStatusCell;
import java.util.ArrayList;
/**
 * Provides column definitions for RichMember objects displayed in a {@link PerunTable}.
 * <p>
 * When created with the 4-argument constructor, the status-changing columns also push
 * updated statuses into the table's {@link ListDataProvider} (and an optional backup list)
 * so that sorted/filtered views stay consistent after a status edit. When created with the
 * 2-argument constructor no provider is available, and the sync step is safely skipped
 * (the previous implementation threw a NullPointerException in that case).
 *
 * @author Pavel Zlamal <256627@mail.muni.cz>
 */
public class MemberColumnProvider {

  private PerunTable<RichMember> table;
  private FieldUpdater<RichMember, RichMember> fieldUpdater;
  // Null unless the 4-arg constructor is used; all callbacks below must tolerate null.
  private ListDataProvider<RichMember> dataProvider;
  // Optional backup of full table content, kept in sync on status changes (may be null).
  private ArrayList<RichMember> backupList;

  /**
   * New instance of MemberColumnProvider.
   *
   * @param table table to add columns to
   * @param fieldUpdater field updater used when cell is "clicked"
   */
  public MemberColumnProvider(PerunTable<RichMember> table, FieldUpdater<RichMember, RichMember> fieldUpdater) {
    this.table = table;
    this.fieldUpdater = fieldUpdater;
  }

  /**
   * New instance of MemberColumnProvider.
   *
   * @param dataProvider data provider associated with table (for refresh purpose)
   * @param backupList backup of table content kept in sync on status changes (may be null)
   * @param table table to add columns to
   * @param fieldUpdater field updater used when cell is "clicked"
   */
  public MemberColumnProvider(ListDataProvider<RichMember> dataProvider, ArrayList<RichMember> backupList, PerunTable<RichMember> table, FieldUpdater<RichMember, RichMember> fieldUpdater) {
    this.dataProvider = dataProvider;
    this.backupList = backupList;
    this.table = table;
    this.fieldUpdater = fieldUpdater;
  }

  public void addIdColumn(IsClickableCell authz) {
    addIdColumn(authz, 0);
  }

  /**
   * Adds a "Member Id" column. The column is shown only when extended info is visible.
   *
   * @param authz decides whether a cell is clickable for the current user
   * @param width column width in px; 0 means "no explicit width"
   */
  public void addIdColumn(IsClickableCell authz, int width) {
    // create column
    Column<RichMember, RichMember> idColumn = JsonUtils.addColumn(new CustomClickableTextCellWithAuthz<RichMember>(authz, "id"), new JsonUtils.GetValue<RichMember, RichMember>() {
      @Override
      public RichMember getValue(RichMember object) {
        return object;
      }
    }, fieldUpdater);
    // add column only if extended info is visible
    if (JsonUtils.isExtendedInfoVisible()) {
      table.addColumn(idColumn, "Member Id");
      if (width != 0) {
        table.setColumnWidth(idColumn, width, Style.Unit.PX);
      }
      // sort column
      idColumn.setSortable(true);
      table.getColumnSortHandler().setComparator(idColumn, new RichMemberComparator(RichMemberComparator.Column.MEMBER_ID));
    }
  }

  public void addUserIdColumn(IsClickableCell authz) {
    addUserIdColumn(authz, 0);
  }

  /**
   * Adds a "User Id" column. The column is shown only when extended info is visible.
   *
   * @param authz decides whether a cell is clickable for the current user
   * @param width column width in px; 0 means "no explicit width"
   */
  public void addUserIdColumn(IsClickableCell authz, int width) {
    // create column
    Column<RichMember, RichMember> idColumn = JsonUtils.addColumn(new CustomClickableTextCellWithAuthz<RichMember>(authz, "userId"), new JsonUtils.GetValue<RichMember, RichMember>() {
      @Override
      public RichMember getValue(RichMember object) {
        return object;
      }
    }, fieldUpdater);
    // add column only if extended info is visible
    if (JsonUtils.isExtendedInfoVisible()) {
      table.addColumn(idColumn, "User Id");
      if (width != 0) {
        table.setColumnWidth(idColumn, width, Style.Unit.PX);
      }
      // sort column
      idColumn.setSortable(true);
      table.getColumnSortHandler().setComparator(idColumn, new RichMemberComparator(RichMemberComparator.Column.USER_ID));
    }
  }

  public void addNameColumn(IsClickableCell authz) {
    addNameColumn(authz, 0);
  }

  /**
   * Adds a sortable "Name" column.
   *
   * @param authz decides whether a cell is clickable for the current user
   * @param width column width in px; 0 means "no explicit width"
   */
  public void addNameColumn(IsClickableCell authz, int width) {
    // create column
    Column<RichMember, RichMember> nameColumn = JsonUtils.addColumn(new CustomClickableTextCellWithAuthz<RichMember>(authz, "name"), new JsonUtils.GetValue<RichMember, RichMember>() {
      @Override
      public RichMember getValue(RichMember object) {
        return object;
      }
    }, fieldUpdater);
    // add column
    table.addColumn(nameColumn, "Name");
    if (width != 0) {
      table.setColumnWidth(nameColumn, width, Style.Unit.PX);
    }
    // sort column
    nameColumn.setSortable(true);
    table.getColumnSortHandler().setComparator(nameColumn, new RichMemberComparator(RichMemberComparator.Column.USER_FULL_NAME));
  }

  public void addOrganizationColumn(IsClickableCell authz) {
    addOrganizationColumn(authz, 0);
  }

  /**
   * Adds a sortable "Organization" column.
   *
   * @param authz decides whether a cell is clickable for the current user
   * @param width column width in px; 0 means "no explicit width"
   */
  public void addOrganizationColumn(IsClickableCell authz, int width) {
    // create column
    Column<RichMember, RichMember> organizationColumn = JsonUtils.addColumn(new CustomClickableTextCellWithAuthz<RichMember>(authz, "organization"), new JsonUtils.GetValue<RichMember, RichMember>() {
      @Override
      public RichMember getValue(RichMember object) {
        return object;
      }
    }, fieldUpdater);
    // add column
    table.addColumn(organizationColumn, "Organization");
    if (width != 0) {
      table.setColumnWidth(organizationColumn, width, Style.Unit.PX);
    }
    // sort column
    organizationColumn.setSortable(true);
    table.getColumnSortHandler().setComparator(organizationColumn, new RichMemberComparator(RichMemberComparator.Column.ORGANIZATION));
  }

  public void addEmailColumn(IsClickableCell authz) {
    addEmailColumn(authz, 0);
  }

  /**
   * Adds a sortable "Email" column.
   *
   * @param authz decides whether a cell is clickable for the current user
   * @param width column width in px; 0 means "no explicit width"
   */
  public void addEmailColumn(IsClickableCell authz, int width) {
    // create column
    Column<RichMember, RichMember> emailColumn = JsonUtils.addColumn(new CustomClickableTextCellWithAuthz<RichMember>(authz, "email"), new JsonUtils.GetValue<RichMember, RichMember>() {
      @Override
      public RichMember getValue(RichMember object) {
        return object;
      }
    }, fieldUpdater);
    // add column
    table.addColumn(emailColumn, "Email");
    if (width != 0) {
      table.setColumnWidth(emailColumn, width, Style.Unit.PX);
    }
    // sort column
    emailColumn.setSortable(true);
    table.getColumnSortHandler().setComparator(emailColumn, new RichMemberComparator(RichMemberComparator.Column.EMAIL));
  }

  public void addLoginsColumn(IsClickableCell authz) {
    addLoginsColumn(authz, 0);
  }

  /**
   * Adds a "Logins" column. Logins are an aggregate value, so the column is not sortable.
   *
   * @param authz decides whether a cell is clickable for the current user
   * @param width column width in px; 0 means "no explicit width"
   */
  public void addLoginsColumn(IsClickableCell authz, int width) {
    // create column
    Column<RichMember, RichMember> loginsColumn = JsonUtils.addColumn(new CustomClickableTextCellWithAuthz<RichMember>(authz, "logins"), new JsonUtils.GetValue<RichMember, RichMember>() {
      @Override
      public RichMember getValue(RichMember object) {
        return object;
      }
    }, fieldUpdater);
    // add column
    table.addColumn(loginsColumn, "Logins");
    if (width != 0) {
      table.setColumnWidth(loginsColumn, width, Style.Unit.PX);
    }
    // logins are not sortable
  }

  public void addStatusColumn(IsClickableCell authz) {
    addStatusColumn(authz, 0);
  }

  /**
   * Adds a sortable VO-membership "Status" column. Clicking a cell opens the
   * status-change tab; on success the new status is propagated to the table data.
   *
   * @param authz decides whether a cell is clickable for the current user
   * @param width column width in px; 0 means "no explicit width"
   */
  public void addStatusColumn(IsClickableCell authz, int width) {
    // Status column
    final Column<RichMember, String> statusColumn = new Column<RichMember, String>(
        new PerunStatusCell()) {
      @Override
      public String getValue(RichMember object) {
        return object.getStatus();
      }
    };
    // own onClick tab for changing member's status
    statusColumn.setFieldUpdater(new FieldUpdater<RichMember, String>() {
      @Override
      public void update(final int index, final RichMember object, final String value) {
        PerunWebSession.getInstance().getTabManager().addTabToCurrentTab(new ChangeStatusTabItem(object.cast(), new JsonCallbackEvents() {
          @Override
          public void onFinished(JavaScriptObject jso) {
            Member m = jso.cast();
            // set status to object in cell to change rendered value
            object.setStatus(m.getStatus());
            // forcefully set status to objects in lists, because they are
            // not updated during .update() on cell
            syncStatusInLists(m, false);
          }
        }));
      }
    });
    // add column
    table.addColumn(statusColumn, "Status");
    if (width != 0) {
      table.setColumnWidth(statusColumn, width, Style.Unit.PX);
    }
    // status column sortable
    statusColumn.setSortable(true);
    table.getColumnSortHandler().setComparator(statusColumn, new GeneralComparator<RichMember>(GeneralComparator.Column.STATUS));
  }

  public void addGroupStatusColumn(IsClickableCell authz, int groupId) {
    addGroupStatusColumn(authz, groupId, 0);
  }

  /**
   * Adds a sortable "Group Status" column. Clicking a cell opens the group-status-change
   * tab, except for INDIRECT members whose group status cannot be changed directly.
   *
   * @param authz decides whether a cell is clickable for the current user
   * @param groupId ID of the group the status relates to
   * @param width column width in px; 0 means "no explicit width"
   */
  public void addGroupStatusColumn(IsClickableCell authz, int groupId, int width) {
    // Status column
    final Column<RichMember, String> statusColumn = new Column<RichMember, String>(
        new PerunStatusCell()) {
      @Override
      public String getValue(RichMember object) {
        return object.getGroupStatus();
      }
    };
    // own onClick tab for changing member's status
    statusColumn.setFieldUpdater(new FieldUpdater<RichMember, String>() {
      @Override
      public void update(final int index, final RichMember object, final String value) {
        if ("INDIRECT".equalsIgnoreCase(object.getMembershipType())) {
          UiElements.generateInfo("Can't change group membership status!", "INDIRECT members can't have their group membership status changed directly." +
              "<p>Please change members group status in all sourcing groups." +
              "<p>In order to expire member in a group, member must be set to EXPIRED in all sourcing groups (sub-groups and groups in relation)." +
              "<p>In order to validate member, at least one sourcing group must have member with VALID group membership status.");
        } else {
          PerunWebSession.getInstance().getTabManager().addTabToCurrentTab(new ChangeGroupStatusTabItem(object, groupId, new JsonCallbackEvents() {
            @Override
            public void onFinished(JavaScriptObject jso) {
              Member m = jso.cast();
              // set status to object in cell to change rendered value
              object.setGroupStatus(m.getGroupStatus());
              // forcefully set status to objects in lists, because they are
              // not updated during .update() on cell
              syncStatusInLists(m, true);
            }
          }));
        }
      }
    });
    // add column
    table.addColumn(statusColumn, "Group Status");
    if (width != 0) {
      table.setColumnWidth(statusColumn, width, Style.Unit.PX);
    }
    // status column sortable
    statusColumn.setSortable(true);
    table.getColumnSortHandler().setComparator(statusColumn, new RichMemberComparator(RichMemberComparator.Column.GROUP_STATUS));
  }

  /**
   * Propagates a member's new status into the backup list and the data provider's list,
   * then refreshes the view. Safe to call when this provider was created without a
   * data provider (the previous inline version threw NPE in that case).
   *
   * @param m member object returned by the status-change call
   * @param groupStatus true to propagate the group membership status, false for VO status
   */
  private void syncStatusInLists(Member m, boolean groupStatus) {
    if (backupList != null) {
      for (RichMember rm : backupList) {
        if (rm.getId() == m.getId()) {
          if (groupStatus) {
            rm.setGroupStatus(m.getGroupStatus());
          } else {
            rm.setStatus(m.getStatus());
          }
        }
      }
    }
    if (dataProvider != null) {
      for (RichMember rm : dataProvider.getList()) {
        if (rm.getId() == m.getId()) {
          if (groupStatus) {
            rm.setGroupStatus(m.getGroupStatus());
          } else {
            rm.setStatus(m.getStatus());
          }
        }
      }
      dataProvider.refresh();
      dataProvider.flush();
    }
  }

  /**
   * Default clickable/authorization rule: a cell is clickable for VO admins and
   * VO observers of the member's VO, and links to the member detail page.
   */
  public static IsClickableCell<GeneralObject> getDefaultClickableAuthz() {
    return new IsClickableCell<GeneralObject>() {
      @Override
      public boolean isClickable(GeneralObject object) {
        RichMember rm = object.cast();
        return (PerunWebSession.getInstance().isVoAdmin(rm.getVoId()) || PerunWebSession.getInstance().isVoObserver(rm.getVoId()));
      }
      @Override
      public String linkUrl(GeneralObject object) {
        RichMember rm = object.cast();
        return MembersTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + MemberDetailTabItem.URL + "?id=" + rm.getId() + "&active=1";
      }
    };
  }
}
| |
package com.jetbrains.edu.learning.courseFormat.tasks;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import com.intellij.lang.Language;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.xmlb.XmlSerializer;
import com.intellij.util.xmlb.annotations.Transient;
import com.jetbrains.edu.learning.EduPluginConfigurator;
import com.jetbrains.edu.learning.StudyUtils;
import com.jetbrains.edu.learning.checker.StudyTaskChecker;
import com.jetbrains.edu.learning.core.EduNames;
import com.jetbrains.edu.learning.courseFormat.*;
import com.jetbrains.edu.learning.stepic.EduAdaptiveStepicConnector;
import com.jetbrains.edu.learning.stepic.EduStepicConnector;
import one.util.streamex.EntryStream;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Implementation of a task, which contains task files, tests, and an input file for tests.
 *
 * To implement a new task type there are 4 steps to be done:
 * - extend the Task class
 * - go to Lesson and update elementTypes in the taskList AbstractCollection (needed for proper XML serialization)
 * - update TaskSerializer and TaskDeserializer in StudySerializationUtil to handle JSON serialization
 * - for adaptive tasks update taskTypes in EduAdaptiveStepicConnector so the new task type can be added to a course
 *
 * NOTE(review): fields are serialized both via Gson (@Expose/@SerializedName) and via
 * XmlSerializer (see copy()), so renaming fields or annotations changes persisted formats.
 */
public abstract class Task implements StudyItem {
@Expose private String name;
// index is visible to user number of task from 1 to task number
private int myIndex;
// Excluded from equals/hashCode below (status is transient UI state).
protected StudyStatus myStatus = StudyStatus.Unchecked;
// Remote step identifier; 0 means "not bound to a Stepik step" (see isUpToDate()).
@SerializedName("stepic_id")
@Expose private int myStepId;
@SerializedName("task_files")
@Expose public Map<String, TaskFile> taskFiles = new HashMap<>();
// test file name -> test source text
@SerializedName("test_files")
@Expose protected Map<String, String> testsText = new HashMap<>();
// task description file name -> description text
@SerializedName("task_texts")
@Expose protected Map<String, String> taskTexts = new HashMap<>();
// Not serialized: back-reference to the owning lesson, re-established in initTask().
@Transient private Lesson myLesson;
@Expose @SerializedName("update_date") private Date myUpdateDate;
public Task() {}
public Task(@NotNull final String name) {
this.name = name;
}
/**
 * Initializes state of task file.
 *
 * @param lesson lesson which task belongs to
 * @param isRestarted when false, the task status is reset to Unchecked
 */
public void initTask(final Lesson lesson, boolean isRestarted) {
setLesson(lesson);
if (!isRestarted) myStatus = StudyStatus.Unchecked;
for (TaskFile taskFile : getTaskFiles().values()) {
taskFile.initTaskFile(this, isRestarted);
}
}
@SuppressWarnings("unused")
//used for deserialization
public void setTaskTexts(Map<String, String> taskTexts) {
this.taskTexts = taskTexts;
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public int getIndex() {
return myIndex;
}
@Override
public void setIndex(int index) {
myIndex = index;
}
public Map<String, String> getTestsText() {
return testsText;
}
@SuppressWarnings("unused")
//used for deserialization
public void setTestsText(Map<String, String> testsText) {
this.testsText = testsText;
}
public Map<String, String> getTaskTexts() {
return taskTexts;
}
public void addTestsTexts(String name, String text) {
testsText.put(name, text);
}
public void addTaskText(String name, String text) {
taskTexts.put(name, text);
}
public Map<String, TaskFile> getTaskFiles() {
return taskFiles;
}
// Returns null both for an unknown file name and for a null name argument.
@Nullable
public TaskFile getTaskFile(final String name) {
return name != null ? taskFiles.get(name) : null;
}
// Creates a fresh TaskFile registered under the given name and display index.
public void addTaskFile(@NotNull final String name, int index) {
TaskFile taskFile = new TaskFile();
taskFile.setIndex(index);
taskFile.setTask(this);
taskFile.name = name;
taskFiles.put(name, taskFile);
}
public void addTaskFile(@NotNull final TaskFile taskFile) {
taskFiles.put(taskFile.name, taskFile);
}
@Nullable
public TaskFile getFile(@NotNull final String fileName) {
return taskFiles.get(fileName);
}
@Transient
public Lesson getLesson() {
return myLesson;
}
@Transient
public void setLesson(Lesson lesson) {
myLesson = lesson;
}
/**
 * Resolves this task's directory on disk ("lessonN/taskM", preferring its "src" child).
 * Returns null when the directory does not exist.
 * NOTE(review): dereferences myLesson without a null check — callers must not invoke
 * this on a copy produced by copy(), which calls initTask(null, true).
 */
@Nullable
public VirtualFile getTaskDir(@NotNull final Project project) {
String lessonDirName = EduNames.LESSON + String.valueOf(myLesson.getIndex());
String taskDirName = EduNames.TASK + String.valueOf(myIndex);
VirtualFile courseDir = project.getBaseDir();
if (courseDir != null) {
VirtualFile lessonDir = courseDir.findChild(lessonDirName);
if (lessonDir != null) {
VirtualFile taskDir = lessonDir.findChild(taskDirName);
if (taskDir == null) {
return null;
}
VirtualFile srcDir = taskDir.findChild(EduNames.SRC);
return srcDir != null ? srcDir : taskDir;
}
}
return null;
}
/**
 * Returns the task description text, or null when no matching entry exists in taskTexts.
 *
 * @param wrap if true, text will be wrapped with ancillary information (e.g. to display latex)
 */
public String getTaskDescription(boolean wrap) {
String fileName = getTaskDescriptionName();
//TODO: replace this with simple get after implementing migration for taskTexts
// Matches on the file name with its extension stripped, hence the linear search.
Map.Entry<String, String> entry =
EntryStream.of(taskTexts).findFirst(e -> FileUtil.getNameWithoutExtension(e.getKey()).equals(fileName)).orElse(null);
if (entry == null) {
return null;
}
String taskText = entry.getValue();
if (!wrap) {
return taskText;
}
taskText = StudyUtils.wrapTextToDisplayLatex(StudyUtils.convertToHtml(taskText));
if (getLesson().getCourse().isAdaptive()) {
taskText = EduAdaptiveStepicConnector.wrapAdaptiveCourseText(this, taskText);
}
return taskText;
}
public String getTaskDescription() {
return getTaskDescription(true);
}
public String getTaskDescriptionName() {
return EduNames.TASK;
}
/**
 * Reads the test file's current editor/document content for this task,
 * or returns an empty string when the task dir, file, or document is unavailable.
 */
@NotNull
public String getTestsText(@NotNull final Project project) {
final Course course = getLesson().getCourse();
final Language language = course.getLanguageById();
final EduPluginConfigurator configurator = EduPluginConfigurator.INSTANCE.forLanguage(language);
final VirtualFile taskDir = getTaskDir(project);
if (taskDir != null) {
final VirtualFile file = taskDir.findChild(configurator.getTestFileName());
if (file == null) return "";
final Document document = FileDocumentManager.getInstance().getDocument(file);
if (document != null) {
return document.getImmutableCharSequence().toString();
}
}
return "";
}
// Equality intentionally covers name, index and content maps only —
// not myStatus, myStepId, myLesson or myUpdateDate.
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Task task = (Task)o;
if (myIndex != task.myIndex) return false;
if (name != null ? !name.equals(task.name) : task.name != null) return false;
if (taskFiles != null ? !taskFiles.equals(task.taskFiles) : task.taskFiles != null) return false;
if (taskTexts != null ? !taskTexts.equals(task.taskTexts) : task.taskTexts != null) return false;
if (testsText != null ? !testsText.equals(task.testsText) : task.testsText != null) return false;
return true;
}
// Consistent with equals(): same field set, same null handling.
@Override
public int hashCode() {
int result = name != null ? name.hashCode() : 0;
result = 31 * result + myIndex;
result = 31 * result + (taskFiles != null ? taskFiles.hashCode() : 0);
result = 31 * result + (taskTexts != null ? taskTexts.hashCode() : 0);
result = 31 * result + (testsText != null ? testsText.hashCode() : 0);
return result;
}
public void setStepId(int stepId) {
myStepId = stepId;
}
public int getStepId() {
return myStepId;
}
public StudyStatus getStatus() {
return myStatus;
}
// Propagates the status to every active placeholder before recording it on the task.
public void setStatus(StudyStatus status) {
for (TaskFile taskFile : taskFiles.values()) {
for (AnswerPlaceholder placeholder : taskFile.getActivePlaceholders()) {
placeholder.setStatus(status);
}
}
myStatus = status;
}
/**
 * Deep-copies this task via XML round-trip serialization.
 * The copy has no lesson (initTask(null, true)); callers must set one before use.
 */
public Task copy() {
Element element = XmlSerializer.serialize(this);
Task copy = XmlSerializer.deserialize(element, getClass());
copy.initTask(null, true);
return copy;
}
public void setUpdateDate(Date date) {
myUpdateDate = date;
}
public Date getUpdateDate() {
return myUpdateDate;
}
/**
 * Checks whether the local task is at least as new as the remote Stepik step.
 * Unbound tasks (step id 0) and unreachable remote dates count as up to date;
 * a missing local date counts as out of date.
 */
public boolean isUpToDate() {
if (getStepId() == 0) return true;
final Date date = EduStepicConnector.getTaskUpdateDate(getStepId());
if (date == null) return true;
if (myUpdateDate == null) return false;
return !date.after(myUpdateDate);
}
// Shallow copy: the maps are shared with the source task, not cloned.
public void copyTaskParameters(Task task) {
setName(task.getName());
setIndex(task.getIndex());
setStatus(task.getStatus());
setStepId(task.getStepId());
taskFiles = task.getTaskFiles();
testsText = task.getTestsText();
taskTexts = task.getTaskTexts();
setLesson(task.getLesson());
setUpdateDate(task.getUpdateDate());
}
// used in json serialization/deserialization
public abstract String getTaskType();
public StudyTaskChecker getChecker(@NotNull Project project) {
return new StudyTaskChecker<>(this, project);
}
// 1-based position of this task inside its lesson.
public int getPosition() {
final Lesson lesson = getLesson();
return lesson.getTaskList().indexOf(this) + 1;
}
public void saveTaskText(String text) {
taskTexts.put(getTaskDescriptionName(), text);
}
}
| |
package ur_rna.GUITester.GuiTools.Matchers;
import abbot.finder.Matcher;
import java.awt.*;
import java.util.Objects;
/**
 * Base class for matchers that can be combined with boolean algebra (AND/OR/XOR/NOT).
 * Provides the constant matchers TRUE and FALSE, factory methods that build composed
 * matchers while applying algebraic simplifications (e.g. {@code and(TRUE, m) == m},
 * double negation elimination), and wrapper classes for the composed forms.
 */
public abstract class ComposableBase implements ComposableMatcher {
// Constant matcher that matches every component; acts as the identity for AND
// and the absorbing element for OR.
public static final ComposableMatcher TRUE = new ComposableBase() {
@Override
public boolean matches(Component component) {
return true;
}
@Override
public ComposableMatcher inverse() {
return FALSE;
}
@Override
public ComposableMatcher and(Matcher other) {
// TRUE AND x == x
return identity(other);
}
@Override
public ComposableMatcher or(Matcher other) {
// TRUE OR x == TRUE
return TRUE;
}
@Override
public ComposableMatcher xor(Matcher other) {
// TRUE XOR x == NOT x
return inverse(other);
}
@Override
public ComposableMatcher simplify() {
return this;
}
};
// Constant matcher that matches nothing; acts as the identity for OR/XOR
// and the absorbing element for AND.
public static final ComposableMatcher FALSE = new ComposableBase() {
@Override
public boolean matches(Component component) {
return false;
}
@Override
public ComposableMatcher inverse() {
return TRUE;
}
@Override
public ComposableMatcher and(Matcher other) {
// FALSE AND x == FALSE
return FALSE;
}
@Override
public ComposableMatcher or(Matcher other) {
// FALSE OR x == x
return identity(other);
}
@Override
public ComposableMatcher xor(Matcher other) {
// FALSE XOR x == x
return identity(other);
}
@Override
public ComposableMatcher simplify() {
return this;
}
};
/** Wraps any Matcher so it exposes the composable (and/or/xor/inverse) operations. */
public static ComposableMatcher composable(Matcher m) {
return identity(m);
}
/**
 * Returns a matcher equivalent to NOT operand, applying simplifications:
 * NOT(NOT m) == m, identity wrappers are unwrapped first, NOT TRUE == FALSE,
 * and NOT FALSE == TRUE (a null operand is treated as FALSE here).
 */
public static ComposableMatcher inverse(Matcher operand) {
if (operand instanceof Inverse)
return identity(((Inverse) operand).getReference());
if (operand instanceof Identity)
return inverse(((Identity) operand).self());
if (operand == TRUE)
return FALSE;
if (operand == FALSE || operand == null)
return TRUE;
return new Inverse(operand);
}
// Wraps a plain Matcher in an Identity adapter; already-composable matchers pass through.
protected static ComposableMatcher identity(Matcher operand) {
if (operand instanceof ComposableMatcher)
return (ComposableMatcher) operand;
return new Identity(operand);
}
// Simplifies a matcher if it is composable; plain matchers are wrapped unchanged.
protected static ComposableMatcher simplify(Matcher m) {
if (m == null) return null;
if (m instanceof ComposableMatcher)
return ((ComposableMatcher) m).simplify();
return new Identity(m);
}
// public static Matcher simplifyFull(Matcher m) {
// if (m instanceof ComposableMatcher)
// return ((ComposableMatcher) m).simplify().self();
// return m;
// }
public ComposableMatcher and(Matcher other) {
return BinaryMatcher.and(this, other);
}
public ComposableMatcher or(Matcher other) {
return BinaryMatcher.or(this, other);
}
public ComposableMatcher xor(Matcher other) {
return BinaryMatcher.xor(this, other);
}
public ComposableMatcher inverse() {
return ComposableBase.inverse(this);
}
public ComposableMatcher simplify() { return this; } //override this if this matcher can be simplified or reduced.
public Matcher self() {
return this; //override this if there is a more direct representation of this Matcher.
}
/** Narrows this matcher to components whose description matches the given text. */
public ComposableMatcher withText(String text) {
return this.and(new DescriptionMatcher(text));
}
/** Narrows this matcher to components whose description matches within the given scope. */
public ComposableMatcher withText(String text, int searchScope) {
return this.and(new DescriptionMatcher(text, searchScope));
}
/** Narrows this matcher, building the search scope from individual boolean flags. */
public ComposableMatcher withText(String text, boolean searchCaption, boolean searchText, boolean searchName) {
int scope = 0;
if (searchCaption)scope |= DescriptionMatcher.SEARCH_CAPTION;
if (searchText)scope |= DescriptionMatcher.SEARCH_TEXT;
if (searchName)scope |= DescriptionMatcher.SEARCH_NAME;
return withText(text, scope);
}
@Override
public String toString() {
String type = this.getClass().getSimpleName();
if (!type.toLowerCase().contains("matcher"))
type = type + " Matcher";
return "{ " + type + " }";
}
/** Logical negation of a wrapped matcher. */
protected static class Inverse extends ComposableBase {
private Matcher _m;
public Inverse(Matcher reference) {
_m = reference;
}
public Matcher getReference() {
return _m;
}
@Override
public boolean matches(Component component) {
return !_m.matches(component);
}
@Override
public ComposableMatcher simplify() {
// inverse() collapses double negation produced by simplifying the operand.
return inverse(simplify(_m));
}
@Override
public String toString() {
return "NOT " + Objects.toString(_m);
}
}
/** Adapter that makes a plain Matcher composable without changing its behavior. */
protected static class Identity extends ComposableBase {
private Matcher _m;
public Identity(Matcher reference) {
_m = reference;
}
public Matcher getReference() {
return _m;
}
@Override
public boolean matches(Component component) {
return _m.matches(component);
}
@Override
public ComposableMatcher simplify() {
if (_m instanceof ComposableMatcher)
return ((ComposableMatcher) _m).simplify(); // implicitly performs sub-IdentityMatcher flattening.
return this;
}
@Override
public Matcher self() {
return _m;
}
@Override
public String toString() {
return Objects.toString(_m);
}
}
/** Base for two-operand combinations; the static factories apply constant folding. */
protected static abstract class BinaryMatcher extends ComposableBase {
protected BinaryMatcher(Matcher a, Matcher b) {
// Unwrap to the most direct representation of each operand before storing.
if (a instanceof ComposableMatcher) a = ((ComposableMatcher) a).self();
if (b instanceof ComposableMatcher) b = ((ComposableMatcher) b).self();
_a = a;
_b = b;
}
protected Matcher _a;
protected Matcher _b;
public void setRhs(Matcher m) {
_b = m;
}
public Matcher getRhs() {
return _b;
}
public void setLhs(Matcher m) {
_a = m;
}
public Matcher getLhs() {
return _a;
}
@Override
public String toString() {
return String.format("(%s %s %s)", _a, getOperandName().toUpperCase(), _b);
}
// Operator name used in toString(); derived from the subclass name (And/Or/Xor).
public String getOperandName() {
return this.getClass().getSimpleName().toLowerCase();
}
// a AND b with constant folding; a null operand is treated as TRUE (no constraint).
public static ComposableMatcher and(Matcher a, Matcher b) {
if (a == TRUE || a == null)
return identity(b);
if (b == TRUE || b == null)
return identity(a);
if (a == FALSE || b == FALSE)
return FALSE;
return new And(a, b);
}
// a OR b with constant folding; a null operand is treated as FALSE.
public static ComposableMatcher or(Matcher a, Matcher b) {
if (a == FALSE || a == null)
return identity(b);
if (b == FALSE || b == null)
return identity(a);
if (a == TRUE || b == TRUE)
return TRUE;
return new Or(a, b);
}
// a XOR b with constant folding; a null operand is treated as FALSE.
public static ComposableMatcher xor(Matcher a, Matcher b) {
if (a == FALSE || a == null)
return identity(b);
if (b == FALSE || b == null)
return identity(a);
if (a == TRUE)
return inverse(b);
if (b == TRUE)
return inverse(a);
return new Xor(a, b);
}
public static class And extends BinaryMatcher {
public And(Matcher a, Matcher b) {
super(a, b);
}
@Override
public boolean matches(Component c) {
// A null operand imposes no constraint (vacuously true).
return (_a == null || _a.matches(c)) && (_b == null || _b.matches(c));
}
@Override
public ComposableMatcher simplify() {
return BinaryMatcher.and(simplify(_a), simplify(_b));
}
}
public static class Or extends BinaryMatcher {
public Or(Matcher a, Matcher b) {
super(a, b);
}
@Override
public boolean matches(Component c) {
// A null operand contributes false (matches nothing).
return (_a != null && _a.matches(c)) || (_b != null && _b.matches(c));
}
@Override
public ComposableMatcher simplify() {
return BinaryMatcher.or(simplify(_a), simplify(_b));
}
}
public static class Xor extends BinaryMatcher {
public Xor(Matcher a, Matcher b) {
super(a, b);
}
@Override
public boolean matches(Component c) {
// A null operand contributes false; ^ gives exclusive-or of the two results.
return (_a != null && _a.matches(c)) ^ (_b != null && _b.matches(c));
}
@Override
public ComposableMatcher simplify() {
return BinaryMatcher.xor(simplify(_a), simplify(_b));
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.geode.internal.offheap.annotations.Unretained;
/**
 *
 * Handles some simple editing of fixed-length arrays.
 *
 * TODO use Java generics to simplify this interface
 *
 */
public abstract class ArrayUtils {
/**
 * Gets the element at index in the array in a bounds-safe manner. If index is not a valid index
 * in the given array, then the default value is returned.
 * <p/>
 *
 * @param <T> the class type of the elements in the array.
 * @param array the array from which the element at index is retrieved; must not be null.
 * @param index the index into the array to retrieve the element.
 * @param defaultValue the default value of element type to return in the event that the array
 *        index is invalid.
 * @return the element at index from the array or the default value if the index is invalid.
 * @throws NullPointerException if array is null (matching the previous behavior).
 */
public static <T> T getElementAtIndex(T[] array, int index, T defaultValue) {
  // Explicit bounds check instead of catching ArrayIndexOutOfBoundsException:
  // exceptions should not be used for expected control flow. A null array still
  // throws NullPointerException (on array.length), exactly as before.
  return (index >= 0 && index < array.length) ? array[index] : defaultValue;
}
/**
 * Gets the first element from the given array or null if the array reference is null or the array
 * length is 0.
 * <p/>
 *
 * @param <T> the Class type of the elements in the array.
 * @param array the array of elements from which to retrieve the first element.
 * @return the first element from the array or null if either the array reference is null or the
 *         array length is 0.
 */
@SafeVarargs // safe: the varargs array is only read, never written to or exposed
public static <T> T getFirst(final T... array) {
  return (array != null && array.length > 0 ? array[0] : null);
}
/**
 * Converts the specified Object array into a String representation such as "[a, b, c]".
 * A null or empty array yields "[]"; null elements render as "null".
 * <p/>
 *
 * @param array the Object array of elements to convert to a String.
 * @return a String representation of the Object array.
 * @see java.lang.StringBuilder
 */
public static String toString(final Object... array) {
  final StringBuilder sb = new StringBuilder("[");
  if (array != null) {
    // Emit a comma separator before every element except the first.
    String separator = "";
    for (final Object element : array) {
      sb.append(separator).append(element);
      separator = ", ";
    }
  }
  return sb.append("]").toString();
}

/**
 * Converts the specified String array into the same "[a, b, c]" representation.
 *
 * @param array the String array of elements to convert to a String.
 * @return a String representation of the String array.
 */
public static String toString(final String... array) {
  // The cast makes the array the whole varargs payload rather than a single element.
  return toString((Object[]) array);
}
/**
 * Insert an element into an array. The element is inserted at the given position, all elements
 * afterwards are moved to the right.
 *
 * @param originalArray array to insert into
 * @param pos position at which to insert the element
 * @param element element to add
 * @return the new array
 */
public static Object[] insert(Object[] originalArray, int pos, Object element) {
  // Allocate a new array with the same component type, one slot larger.
  Object[] result = (Object[]) java.lang.reflect.Array
      .newInstance(originalArray.getClass().getComponentType(), originalArray.length + 1);
  // Prefix [0, pos) keeps its positions; arraycopy with length 0 is a harmless no-op.
  System.arraycopy(originalArray, 0, result, 0, pos);
  // The new element occupies the requested slot.
  result[pos] = element;
  // Suffix [pos, length) shifts right by one; no-op when inserting at the end.
  System.arraycopy(originalArray, pos, result, pos + 1, originalArray.length - pos);
  return result;
}
/**
 * Remove element from an array. The element is removed at the specified position, and all
 * remaining elements are moved to the left.
 *
 * @param originalArray array to remove from
 * @param pos position to remove
 * @return the new array
 */
public static Object[] remove(Object[] originalArray, int pos) {
  // Allocate a new array with the same component type, one slot smaller.
  Object[] result = (Object[]) java.lang.reflect.Array
      .newInstance(originalArray.getClass().getComponentType(), originalArray.length - 1);
  // Prefix [0, pos) is kept as-is; arraycopy with length 0 is a harmless no-op.
  System.arraycopy(originalArray, 0, result, 0, pos);
  // Suffix (pos, length) shifts left by one; no-op when removing the last element.
  System.arraycopy(originalArray, pos + 1, result, pos, originalArray.length - 1 - pos);
  return result;
}
/**
 * Returns a compact identity string for the object in the form
 * {@code SimpleClassName@identityHashHex}, or {@code "(null)"} for null.
 */
public static String objectRefString(Object obj) {
    if (obj == null) {
        return "(null)";
    }
    return obj.getClass().getSimpleName() + '@'
            + Integer.toHexString(System.identityHashCode(obj));
}
/**
 * Appends a compact identity string for the object
 * ({@code SimpleClassName@identityHashHex}, or {@code "(null)"} for null)
 * to the supplied builder.
 */
public static void objectRefString(Object obj, StringBuilder sb) {
    if (obj == null) {
        sb.append("(null)");
        return;
    }
    sb.append(obj.getClass().getSimpleName())
            .append('@')
            .append(Integer.toHexString(System.identityHashCode(obj)));
}
/** Returns a proper string for an object, expanding object and byte arrays. */
public static String objectString(Object obj) {
    final StringBuilder buffer = new StringBuilder();
    objectString(obj, buffer);
    return buffer.toString();
}
/**
 * Appends a proper string for an object to the builder, recursively expanding
 * object arrays as {@code (e1,e2,...)} and byte arrays via
 * {@link #objectStringWithBytes}.
 */
public static void objectString(Object obj, StringBuilder sb) {
    if (!(obj instanceof Object[])) {
        objectStringWithBytes(obj, sb);
        return;
    }
    Object[] elements = (Object[]) obj;
    sb.append('(');
    for (int i = 0; i < elements.length; i++) {
        if (i > 0) {
            sb.append(',');
        }
        objectString(elements[i], sb);
    }
    sb.append(')');
}
/**
 * Returns a proper string for an object, expanding arrays up to one dimension only
 * (non-recursive variant of {@link #objectString(Object)}).
 */
public static String objectStringNonRecursive(@Unretained Object obj) {
    final StringBuilder buffer = new StringBuilder();
    objectStringNonRecursive(obj, buffer);
    return buffer.toString();
}
/**
 * Compares two two-dimensional byte arrays element-wise.
 * Returns true only when both have the same length and every row is equal
 * per {@link Arrays#equals(byte[], byte[])}.
 */
public static boolean areByteArrayArrayEquals(byte[][] v1, byte[][] v2) {
    if (v1.length != v2.length) {
        return false;
    }
    for (int i = 0; i < v1.length; i++) {
        if (!Arrays.equals(v1[i], v2[i])) {
            return false;
        }
    }
    return true;
}
/**
 * Appends a proper string for an object to the builder, expanding arrays up to one
 * dimension only. For an object array, only the FIRST element is expanded with
 * {@link #objectStringWithBytes} (so the first byte[] of a byte[][] is shown byte by
 * byte); subsequent elements are appended with their default string form.
 */
public static void objectStringNonRecursive(@Unretained Object obj, StringBuilder sb) {
    if (!(obj instanceof Object[])) {
        objectStringWithBytes(obj, sb);
        return;
    }
    Object[] elements = (Object[]) obj;
    sb.append('(');
    for (int i = 0; i < elements.length; i++) {
        if (i == 0) {
            // show the first byte[] for byte[][] storage
            objectStringWithBytes(elements[i], sb);
        } else {
            sb.append(',').append(elements[i]);
        }
    }
    sb.append(')');
}
/**
 * Appends a proper string for an object to the builder. A {@code byte[]} is rendered
 * as {@code (b1,b2,...)}; any other object (including null) is appended via
 * {@link StringBuilder#append(Object)}.
 * <p>
 * Large byte arrays are truncated: after 5001 bytes have been appended, if more
 * remain, {@code " ..."} is appended and the loop exits.
 */
private static void objectStringWithBytes(@Unretained Object obj, StringBuilder sb) {
if (obj instanceof byte[]) {
sb.append('(');
boolean first = true;
final byte[] bytes = (byte[]) obj;
int numBytes = 0; // count of bytes appended so far (incremented post-append)
for (byte b : bytes) {
if (!first) {
sb.append(',');
} else {
first = false;
}
sb.append(b);
// terminate with ... for large number of bytes
// (numBytes++ tests the pre-increment value, so truncation happens only
// after the 5001st byte, and only when further bytes remain)
if (numBytes++ >= 5000 && numBytes < bytes.length) {
sb.append(" ...");
break;
}
}
sb.append(')');
} else {
sb.append(obj); // appends "null" for a null reference
}
}
/**
 * Null-safe equality check for two possibly-null objects.
 * Doesn't really belong to this class...
 */
public static boolean objectEquals(Object o1, Object o2) {
    // Identity (covers both null) or delegated equals with a null guard on o1.
    return o1 == o2 || (o1 != null && o1.equals(o2));
}
/**
 * Converts a primitive int array into an Integer wrapper object array.
 * A null input yields an empty array (never null).
 *
 * @param array the primitive int array to convert, may be null
 * @return an Integer array containing the values from the primitive int array
 */
public static Integer[] toIntegerArray(final int[] array) {
    if (array == null) {
        return new Integer[0];
    }
    final Integer[] result = new Integer[array.length];
    for (int i = 0; i < array.length; i++) {
        result[i] = array[i]; // autoboxed
    }
    return result;
}
/**
 * Converts a two-dimensional byte array into a two-dimensional Byte array.
 *
 * @param array the byte[][] to convert, may be null
 * @return a Byte[][] with the same values, or null when the input is null
 */
public static Byte[][] toByteArray(final byte[][] array) {
    if (array == null) {
        return null;
    }
    final Byte[][] result = new Byte[array.length][];
    for (int row = 0; row < array.length; row++) {
        final byte[] source = array[row];
        final Byte[] boxed = new Byte[source.length];
        for (int col = 0; col < source.length; col++) {
            boxed[col] = source[col]; // autoboxed
        }
        result[row] = boxed;
    }
    return result;
}
/**
 * Converts a two-dimensional Byte array into a two-dimensional byte array.
 *
 * @param byteArray the Byte[][] to convert, may be null
 * @return a byte[][] with the same values, or null when the input is null
 */
public static byte[][] toBytes(final Byte[][] byteArray) {
    if (byteArray == null) {
        return null;
    }
    final byte[][] result = new byte[byteArray.length][];
    for (int row = 0; row < byteArray.length; row++) {
        final Byte[] source = byteArray[row];
        final byte[] unboxed = new byte[source.length];
        for (int col = 0; col < source.length; col++) {
            unboxed[col] = source[col]; // auto-unboxed; NPE on a null element, as before
        }
        result[row] = unboxed;
    }
    return result;
}
/**
 * Use this instead of {@code Arrays.asList(T... array)} when you need a modifiable List.
 *
 * Returns a modifiable list containing the elements of the specified array.
 *
 * <p>
 * Example usage:
 *
 * <pre>
 * List&lt;String&gt; stooges = asList("Larry", "Moe", "Curly");
 * stooges.remove("Curly"); // works; Arrays.asList would throw UnsupportedOperationException
 * </pre>
 *
 * @param <T> the class of the objects in the array
 * @param array the array of elements to be added to the list
 * @return a modifiable list containing the elements of the specified array
 */
@SafeVarargs // safe: the varargs array is only read, never stored or exposed
public static <T> List<T> asList(T... array) {
    return new ArrayList<>(Arrays.asList(array));
}
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.pricer.bond;
import java.time.LocalDate;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.StandardId;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.currency.CurrencyAmount;
import com.opengamma.strata.basics.currency.MultiCurrencyAmount;
import com.opengamma.strata.basics.currency.Payment;
import com.opengamma.strata.collect.ArgChecker;
import com.opengamma.strata.market.sensitivity.PointSensitivities;
import com.opengamma.strata.market.sensitivity.PointSensitivityBuilder;
import com.opengamma.strata.pricer.CompoundedRateType;
import com.opengamma.strata.pricer.DiscountingPaymentPricer;
import com.opengamma.strata.pricer.ZeroRateSensitivity;
import com.opengamma.strata.product.bond.FixedCouponBondPaymentPeriod;
import com.opengamma.strata.product.bond.ResolvedFixedCouponBond;
import com.opengamma.strata.product.bond.ResolvedFixedCouponBondTrade;
/**
 * Pricer for fixed coupon bond trades.
 * <p>
 * This function provides the ability to price a {@link ResolvedFixedCouponBondTrade}.
 *
 * <h4>Price</h4>
 * Strata uses <i>decimal prices</i> for bonds in the trade model, pricers and market data.
 * For example, a price of 99.32% is represented in Strata by 0.9932.
 */
public class DiscountingFixedCouponBondTradePricer {

  /**
   * Default implementation.
   */
  public static final DiscountingFixedCouponBondTradePricer DEFAULT = new DiscountingFixedCouponBondTradePricer(
      DiscountingFixedCouponBondProductPricer.DEFAULT,
      DiscountingPaymentPricer.DEFAULT);

  /**
   * Pricer for {@link ResolvedFixedCouponBond}.
   */
  private final DiscountingFixedCouponBondProductPricer productPricer;
  /**
   * Pricer for {@link Payment}.
   */
  private final DiscountingPaymentPricer paymentPricer;

  /**
   * Creates an instance.
   *
   * @param productPricer  the pricer for {@link ResolvedFixedCouponBond}
   * @param paymentPricer  the pricer for {@link Payment}
   */
  public DiscountingFixedCouponBondTradePricer(
      DiscountingFixedCouponBondProductPricer productPricer,
      DiscountingPaymentPricer paymentPricer) {
    this.productPricer = ArgChecker.notNull(productPricer, "productPricer");
    this.paymentPricer = ArgChecker.notNull(paymentPricer, "paymentPricer");
  }

  //-------------------------------------------------------------------------
  /**
   * Calculates the present value of the fixed coupon bond trade.
   * <p>
   * The present value of the trade is the value on the valuation date.
   * The result is expressed using the payment currency of the bond.
   * <p>
   * Coupon payments of the underlying product are considered based on the settlement date of the trade.
   *
   * @param trade  the trade
   * @param provider  the rates provider
   * @return the present value of the fixed coupon bond trade
   */
  public CurrencyAmount presentValue(ResolvedFixedCouponBondTrade trade, LegalEntityDiscountingProvider provider) {
    LocalDate settlementDate = trade.getSettlementDate();
    CurrencyAmount pvProduct = productPricer.presentValue(trade.getProduct(), provider, settlementDate);
    return presentValueFromProductPresentValue(trade, provider, pvProduct);
  }

  /**
   * Calculates the present value of the fixed coupon bond trade with z-spread.
   * <p>
   * The present value of the trade is the value on the valuation date.
   * The result is expressed using the payment currency of the bond.
   * <p>
   * The z-spread is a parallel shift applied to continuously compounded rates or periodic
   * compounded rates of the discounting curve.
   * <p>
   * Coupon payments of the underlying product are considered based on the settlement date of the trade.
   *
   * @param trade  the trade
   * @param provider  the rates provider
   * @param zSpread  the z-spread
   * @param compoundedRateType  the compounded rate type
   * @param periodsPerYear  the number of periods per year
   * @return the present value of the fixed coupon bond trade
   */
  public CurrencyAmount presentValueWithZSpread(
      ResolvedFixedCouponBondTrade trade,
      LegalEntityDiscountingProvider provider,
      double zSpread,
      CompoundedRateType compoundedRateType,
      int periodsPerYear) {
    LocalDate settlementDate = trade.getSettlementDate();
    CurrencyAmount pvProduct = productPricer.presentValueWithZSpread(
        trade.getProduct(), provider, zSpread, compoundedRateType, periodsPerYear, settlementDate);
    return presentValueFromProductPresentValue(trade, provider, pvProduct);
  }

  // scales the product PV by the trade quantity and adds the upfront payment PV
  private CurrencyAmount presentValueFromProductPresentValue(
      ResolvedFixedCouponBondTrade trade,
      LegalEntityDiscountingProvider provider,
      CurrencyAmount productPresentValue) {
    CurrencyAmount pvProduct = productPresentValue.multipliedBy(trade.getQuantity());
    CurrencyAmount pvPayment = presentValuePayment(trade, provider);
    return pvProduct.plus(pvPayment);
  }

  //-------------------------------------------------------------------------
  /**
   * Calculates the present value of the fixed coupon bond trade from the clean price of the underlying product.
   * <p>
   * The present value of the trade is the value on the valuation date.
   * The result is expressed using the payment currency of the bond.
   * <p>
   * Coupon payments of the underlying product are considered based on the settlement date of the trade.
   *
   * @param trade  the trade
   * @param provider  the rates provider
   * @param refData  the reference data used to calculate the settlement date
   * @param cleanPrice  the clean price
   * @return the present value of the fixed coupon bond trade
   */
  public CurrencyAmount presentValueFromCleanPrice(
      ResolvedFixedCouponBondTrade trade,
      LegalEntityDiscountingProvider provider,
      ReferenceData refData,
      double cleanPrice) {
    ResolvedFixedCouponBond product = trade.getProduct();
    LocalDate standardSettlementDate = product.getSettlementDateOffset().adjust(provider.getValuationDate(), refData);
    LocalDate tradeSettlementDate = trade.getSettlementDate();
    StandardId legalEntityId = product.getLegalEntityId();
    Currency currency = product.getCurrency();
    double df = provider.repoCurveDiscountFactors(
        product.getSecurityId(), legalEntityId, currency).discountFactor(standardSettlementDate);
    // dirty value at the standard settlement date = (clean price * notional + accrued) discounted
    double pvStandard =
        (cleanPrice * product.getNotional() + productPricer.accruedInterest(product, standardSettlementDate)) * df;
    if (standardSettlementDate.isEqual(tradeSettlementDate)) {
      return presentValueFromProductPresentValue(trade, provider, CurrencyAmount.of(currency, pvStandard));
    }
    // check coupon payment between two settlement dates
    IssuerCurveDiscountFactors discountFactors = provider.issuerCurveDiscountFactors(legalEntityId, currency);
    double pvDiff = 0d;
    if (standardSettlementDate.isAfter(tradeSettlementDate)) {
      pvDiff = productPricer.presentValueCoupon(product, discountFactors, tradeSettlementDate, standardSettlementDate);
    } else {
      pvDiff = -productPricer.presentValueCoupon(product, discountFactors, standardSettlementDate, tradeSettlementDate);
    }
    return presentValueFromProductPresentValue(trade, provider, CurrencyAmount.of(currency, pvStandard + pvDiff));
  }

  /**
   * Calculates the present value of the fixed coupon bond trade with z-spread from the
   * clean price of the underlying product.
   * <p>
   * The present value of the trade is the value on the valuation date.
   * The result is expressed using the payment currency of the bond.
   * <p>
   * The z-spread is a parallel shift applied to continuously compounded rates or periodic
   * compounded rates of the discounting curve.
   * <p>
   * Coupon payments of the underlying product are considered based on the settlement date of the trade.
   *
   * @param trade  the trade
   * @param provider  the rates provider
   * @param refData  the reference data used to calculate the settlement date
   * @param cleanPrice  the clean price
   * @param zSpread  the z-spread
   * @param compoundedRateType  the compounded rate type
   * @param periodsPerYear  the number of periods per year
   * @return the present value of the fixed coupon bond trade
   */
  public CurrencyAmount presentValueFromCleanPriceWithZSpread(
      ResolvedFixedCouponBondTrade trade,
      LegalEntityDiscountingProvider provider,
      ReferenceData refData,
      double cleanPrice,
      double zSpread,
      CompoundedRateType compoundedRateType,
      int periodsPerYear) {
    ResolvedFixedCouponBond product = trade.getProduct();
    LocalDate standardSettlementDate = product.getSettlementDateOffset().adjust(provider.getValuationDate(), refData);
    LocalDate tradeSettlementDate = trade.getSettlementDate();
    StandardId legalEntityId = product.getLegalEntityId();
    Currency currency = product.getCurrency();
    double df = provider.repoCurveDiscountFactors(
        product.getSecurityId(), legalEntityId, currency).discountFactor(standardSettlementDate);
    // dirty value at the standard settlement date = (clean price * notional + accrued) discounted
    double pvStandard =
        (cleanPrice * product.getNotional() + productPricer.accruedInterest(product, standardSettlementDate)) * df;
    if (standardSettlementDate.isEqual(tradeSettlementDate)) {
      return presentValueFromProductPresentValue(trade, provider, CurrencyAmount.of(currency, pvStandard));
    }
    // check coupon payment between two settlement dates
    IssuerCurveDiscountFactors discountFactors = provider.issuerCurveDiscountFactors(legalEntityId, currency);
    double pvDiff = 0d;
    if (standardSettlementDate.isAfter(tradeSettlementDate)) {
      pvDiff = productPricer.presentValueCouponWithZSpread(
          product,
          discountFactors,
          tradeSettlementDate,
          standardSettlementDate,
          zSpread,
          compoundedRateType,
          periodsPerYear);
    } else {
      pvDiff = -productPricer.presentValueCouponWithZSpread(
          product,
          discountFactors,
          standardSettlementDate,
          tradeSettlementDate,
          zSpread,
          compoundedRateType,
          periodsPerYear);
    }
    return presentValueFromProductPresentValue(trade, provider, CurrencyAmount.of(currency, pvStandard + pvDiff));
  }

  //-------------------------------------------------------------------------
  /**
   * Calculates the present value sensitivity of the fixed coupon bond trade.
   * <p>
   * The present value sensitivity of the trade is the sensitivity of the present value to
   * the underlying curves.
   * <p>
   * Coupon payments of the underlying product are considered based on the settlement date of the trade.
   *
   * @param trade  the trade
   * @param provider  the rates provider
   * @return the present value curve sensitivity of the trade
   */
  public PointSensitivities presentValueSensitivity(
      ResolvedFixedCouponBondTrade trade,
      LegalEntityDiscountingProvider provider) {
    LocalDate settlementDate = trade.getSettlementDate();
    PointSensitivityBuilder sensiProduct = productPricer.presentValueSensitivity(
        trade.getProduct(), provider, settlementDate);
    return presentValueSensitivityFromProductPresentValueSensitivity(trade, provider, sensiProduct).build();
  }

  /**
   * Calculates the present value sensitivity of the fixed coupon bond trade with z-spread.
   * <p>
   * The present value sensitivity of the trade is the sensitivity of the present value to
   * the underlying curves.
   * <p>
   * The z-spread is a parallel shift applied to continuously compounded rates or periodic
   * compounded rates of the discounting curve.
   * <p>
   * Coupon payments of the underlying product are considered based on the settlement date of the trade.
   *
   * @param trade  the trade
   * @param provider  the rates provider
   * @param zSpread  the z-spread
   * @param compoundedRateType  the compounded rate type
   * @param periodsPerYear  the number of periods per year
   * @return the present value curve sensitivity of the trade
   */
  public PointSensitivities presentValueSensitivityWithZSpread(
      ResolvedFixedCouponBondTrade trade,
      LegalEntityDiscountingProvider provider,
      double zSpread,
      CompoundedRateType compoundedRateType,
      int periodsPerYear) {
    LocalDate settlementDate = trade.getSettlementDate();
    PointSensitivityBuilder sensiProduct = productPricer.presentValueSensitivityWithZSpread(
        trade.getProduct(), provider, zSpread, compoundedRateType, periodsPerYear, settlementDate);
    return presentValueSensitivityFromProductPresentValueSensitivity(trade, provider, sensiProduct).build();
  }

  // scales the product sensitivity by the trade quantity and adds the upfront payment sensitivity
  private PointSensitivityBuilder presentValueSensitivityFromProductPresentValueSensitivity(
      ResolvedFixedCouponBondTrade trade,
      LegalEntityDiscountingProvider provider,
      PointSensitivityBuilder productPresentValueSensitivity) {
    PointSensitivityBuilder sensiProduct = productPresentValueSensitivity.multipliedBy(trade.getQuantity());
    PointSensitivityBuilder sensiPayment = presentValueSensitivityPayment(trade, provider);
    return sensiProduct.combinedWith(sensiPayment);
  }

  //-------------------------------------------------------------------------
  /**
   * Calculates the currency exposure of the fixed coupon bond trade.
   *
   * @param trade  the trade
   * @param provider  the rates provider
   * @return the currency exposure of the fixed coupon bond trade
   */
  public MultiCurrencyAmount currencyExposure(ResolvedFixedCouponBondTrade trade, LegalEntityDiscountingProvider provider) {
    return MultiCurrencyAmount.of(presentValue(trade, provider));
  }

  /**
   * Calculates the currency exposure of the fixed coupon bond trade with z-spread.
   *
   * @param trade  the trade
   * @param provider  the rates provider
   * @param zSpread  the z-spread
   * @param compoundedRateType  the compounded rate type
   * @param periodsPerYear  the number of periods per year
   * @return the currency exposure of the fixed coupon bond trade
   */
  public MultiCurrencyAmount currencyExposureWithZSpread(
      ResolvedFixedCouponBondTrade trade,
      LegalEntityDiscountingProvider provider,
      double zSpread,
      CompoundedRateType compoundedRateType,
      int periodsPerYear) {
    return MultiCurrencyAmount.of(presentValueWithZSpread(trade, provider, zSpread, compoundedRateType, periodsPerYear));
  }

  /**
   * Calculates the current cash of the fixed coupon bond trade.
   *
   * @param trade  the trade
   * @param valuationDate  the valuation date
   * @return the current cash amount
   */
  public CurrencyAmount currentCash(ResolvedFixedCouponBondTrade trade, LocalDate valuationDate) {
    Payment upfrontPayment = upfrontPayment(trade);
    Currency currency = upfrontPayment.getCurrency();  // assumes single currency is involved in trade
    CurrencyAmount currentCash = CurrencyAmount.zero(currency);
    if (upfrontPayment.getDate().equals(valuationDate)) {
      currentCash = currentCash.plus(upfrontPayment.getValue());
    }
    LocalDate settlementDate = trade.getSettlementDate();
    ResolvedFixedCouponBond product = trade.getProduct();
    if (!settlementDate.isAfter(valuationDate)) {
      double cashCoupon = product.hasExCouponPeriod() ? 0d : currentCashCouponPayment(product, valuationDate);
      Payment payment = product.getNominalPayment();
      double cashNominal = payment.getDate().isEqual(valuationDate) ? payment.getAmount() : 0d;
      currentCash = currentCash.plus(CurrencyAmount.of(currency, (cashCoupon + cashNominal) * trade.getQuantity()));
    }
    return currentCash;
  }

  // sums the fixed coupon amounts of all periods paying exactly on the reference date
  private double currentCashCouponPayment(ResolvedFixedCouponBond product, LocalDate referenceDate) {
    double cash = 0d;
    for (FixedCouponBondPaymentPeriod period : product.getPeriodicPayments()) {
      if (period.getPaymentDate().isEqual(referenceDate)) {
        cash += period.getFixedRate() * period.getNotional() * period.getYearFraction();
      }
    }
    return cash;
  }

  //-------------------------------------------------------------------------
  // present value of the upfront payment, discounted on the repo curve
  private CurrencyAmount presentValuePayment(ResolvedFixedCouponBondTrade trade, LegalEntityDiscountingProvider provider) {
    ResolvedFixedCouponBond product = trade.getProduct();
    RepoCurveDiscountFactors discountFactors = provider.repoCurveDiscountFactors(
        product.getSecurityId(), product.getLegalEntityId(), product.getCurrency());
    Payment upfrontPayment = upfrontPayment(trade);
    return paymentPricer.presentValue(upfrontPayment, discountFactors.getDiscountFactors());
  }

  // present value sensitivity of the upfront payment, mapped onto the repo curve
  private PointSensitivityBuilder presentValueSensitivityPayment(
      ResolvedFixedCouponBondTrade trade,
      LegalEntityDiscountingProvider provider) {
    ResolvedFixedCouponBond product = trade.getProduct();
    RepoCurveDiscountFactors discountFactors = provider.repoCurveDiscountFactors(
        product.getSecurityId(), product.getLegalEntityId(), product.getCurrency());
    Payment upfrontPayment = upfrontPayment(trade);
    PointSensitivityBuilder pt = paymentPricer.presentValueSensitivity(
        upfrontPayment, discountFactors.getDiscountFactors());
    if (pt instanceof ZeroRateSensitivity) {
      return RepoCurveZeroRateSensitivity.of((ZeroRateSensitivity) pt, discountFactors.getBondGroup());
    }
    return pt;  // NoPointSensitivity
  }

  //-------------------------------------------------------------------------
  /**
   * Calculates the payment that was made for the trade.
   * <p>
   * This is the payment that was made on the settlement date, based on the quantity and clean price.
   *
   * @param trade  the trade
   * @return the payment that was made
   */
  public Payment upfrontPayment(ResolvedFixedCouponBondTrade trade) {
    ResolvedFixedCouponBond product = trade.getProduct();
    // payment is based on the dirty price
    LocalDate settlementDate = trade.getSettlementDate();
    double cleanPrice = trade.getPrice();
    double dirtyPrice = productPricer.dirtyPriceFromCleanPrice(product, settlementDate, cleanPrice);
    // calculate payment
    Currency currency = product.getCurrency();
    double quantity = trade.getQuantity();
    double notional = product.getNotional();
    return Payment.of(CurrencyAmount.of(currency, -quantity * notional * dirtyPrice), settlementDate);
  }

}
| |
/*
* 3D City Database - The Open Source CityGML Database
* https://www.3dcitydb.org/
*
* Copyright 2013 - 2021
* Chair of Geoinformatics
* Technical University of Munich, Germany
* https://www.lrg.tum.de/gis/
*
* The 3D City Database is jointly developed with the following
* cooperation partners:
*
* Virtual City Systems, Berlin <https://vc.systems/>
* M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.citydb.core.operation.importer.concurrent;
import org.citydb.config.Config;
import org.citydb.config.project.global.LogLevel;
import org.citydb.core.database.adapter.AbstractDatabaseAdapter;
import org.citydb.core.file.InputFile;
import org.citydb.core.operation.common.cache.CacheTableManager;
import org.citydb.core.operation.common.cache.IdCacheManager;
import org.citydb.core.operation.common.xlink.*;
import org.citydb.core.operation.importer.database.xlink.resolver.*;
import org.citydb.util.concurrent.Worker;
import org.citydb.util.concurrent.WorkerPool;
import org.citydb.util.event.Event;
import org.citydb.util.event.EventDispatcher;
import org.citydb.util.event.EventHandler;
import org.citydb.util.event.global.EventType;
import org.citydb.util.event.global.InterruptEvent;
import org.citydb.util.log.Logger;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Worker that consumes {@link DBXlink} items from the work queue and resolves them
 * against the database via a {@link DBXlinkResolverManager}, committing in batches.
 * Responds to {@link EventType#INTERRUPT} events on its event channel by stopping work.
 */
public class DBImportXlinkResolverWorker extends Worker<DBXlink> implements EventHandler {
    private final Logger log = Logger.getInstance();
    // guards doWork() so interrupt handling and work processing do not interleave
    private final ReentrantLock runLock = new ReentrantLock();
    // shouldRun: main loop flag, cleared by interrupt(); shouldWork: cleared on INTERRUPT event
    private volatile boolean shouldRun = true;
    private volatile boolean shouldWork = true;

    private final Connection connection;
    // when true, commit/close of the connection is owned by the caller
    private final boolean isManagedTransaction;
    private final DBXlinkResolverManager xlinkResolverManager;
    private final EventDispatcher eventDispatcher;

    // number of resolved XLinks since the last batch execution
    private int updateCounter = 0;
    // batch size after which executeBatch()/commit is triggered
    private int commitAfter;

    /**
     * Creates a worker bound to the given connection and cache managers and registers
     * it for INTERRUPT events.
     *
     * @throws SQLException if the resolver manager cannot be created
     */
    public DBImportXlinkResolverWorker(InputFile inputFile,
                                       Connection connection,
                                       boolean isManagedTransaction,
                                       AbstractDatabaseAdapter databaseAdapter,
                                       WorkerPool<DBXlink> tmpXlinkPool,
                                       IdCacheManager idCacheManager,
                                       CacheTableManager cacheTableManager,
                                       Config config,
                                       EventDispatcher eventDispatcher) throws SQLException {
        this.connection = connection;
        this.isManagedTransaction = isManagedTransaction;
        this.eventDispatcher = eventDispatcher;

        // cap the configured feature batch size at the adapter's maximum batch size
        commitAfter = config.getDatabaseConfig().getImportBatching().getFeatureBatchSize();
        if (commitAfter > databaseAdapter.getMaxBatchSize())
            commitAfter = databaseAdapter.getMaxBatchSize();

        xlinkResolverManager = new DBXlinkResolverManager(
                inputFile,
                connection,
                databaseAdapter,
                tmpXlinkPool,
                idCacheManager,
                cacheTableManager,
                config,
                eventDispatcher);

        eventDispatcher.addEventHandler(EventType.INTERRUPT, this);
    }

    @Override
    public void interrupt() {
        // stop the main loop; an in-flight doWork() finishes normally
        shouldRun = false;
    }

    @Override
    public void run() {
        try {
            // process the work item handed over before the thread was started, if any
            if (firstWork != null) {
                doWork(firstWork);
                firstWork = null;
            }

            // main loop: block on the queue until interrupt() clears shouldRun
            while (shouldRun) {
                try {
                    DBXlink work = workQueue.take();
                    doWork(work);
                } catch (InterruptedException ie) {
                    // re-check state
                }
            }

            // flush the remaining batch unless work was aborted by an INTERRUPT event
            try {
                if (shouldWork) {
                    xlinkResolverManager.executeBatch();
                    if (!isManagedTransaction)
                        connection.commit();
                }
            } catch (Throwable e) {
                try {
                    connection.rollback();
                } catch (SQLException sql) {
                    //
                }

                eventDispatcher.triggerSyncEvent(new InterruptEvent("A fatal error occurred during XLink resolving.", LogLevel.ERROR, e, eventChannel, this));
            }
        } finally {
            // release resources regardless of how the loop ended
            try {
                xlinkResolverManager.close();
            } catch (SQLException e1) {
                //
            }

            // only close the connection when this worker owns the transaction
            if (!isManagedTransaction) {
                try {
                    connection.close();
                } catch (SQLException e) {
                    //
                }
            }

            eventDispatcher.removeEventHandler(this);
        }
    }

    /**
     * Dispatches a single XLink work item to the matching resolver and executes the
     * batch / commits once {@code commitAfter} items have been resolved.
     */
    private void doWork(DBXlink work) {
        final ReentrantLock runLock = this.runLock;
        runLock.lock();

        try {
            if (!shouldWork)
                return;

            boolean success = false;
            DBXlinkEnum type = work.getXlinkType();

            // route the work item to the resolver registered for its XLink type;
            // a missing resolver leaves success == false and is reported below
            switch (type) {
                case SURFACE_GEOMETRY:
                    DBXlinkSurfaceGeometry surfaceGeometry = (DBXlinkSurfaceGeometry)work;
                    XlinkSurfaceGeometry xlinkSurfaceGeometry = (XlinkSurfaceGeometry)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.SURFACE_GEOMETRY);
                    if (xlinkSurfaceGeometry != null)
                        success = xlinkSurfaceGeometry.insert(surfaceGeometry);

                    break;
                case BASIC:
                    DBXlinkBasic basic = (DBXlinkBasic)work;
                    XlinkBasic xlinkBasic = (XlinkBasic)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.BASIC);
                    if (xlinkBasic != null)
                        success = xlinkBasic.insert(basic);

                    break;
                case TEXTURE_COORD_LIST:
                    DBXlinkTextureCoordList texCoord = (DBXlinkTextureCoordList)work;
                    XlinkTexCoordList xlinkTexCoordList = (XlinkTexCoordList)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.TEXCOORDLIST);
                    if (xlinkTexCoordList != null)
                        success = xlinkTexCoordList.insert(texCoord);

                    break;
                case TEXTUREPARAM:
                    DBXlinkTextureParam textureParam = (DBXlinkTextureParam)work;
                    DBXlinkTextureParamEnum subType = textureParam.getType();

                    switch (subType) {
                        case X3DMATERIAL:
                        case GEOREFERENCEDTEXTURE:
                        case TEXCOORDGEN:
                            XlinkTextureParam xlinkTextureParam = (XlinkTextureParam)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.TEXTUREPARAM);
                            if (xlinkTextureParam != null)
                                success = xlinkTextureParam.insert(textureParam);

                            break;
                        case UNDEFINED:
                            // nothing to do
                    }

                    break;
                case TEXTUREASSOCIATION:
                    DBXlinkTextureAssociation textureAssociation = (DBXlinkTextureAssociation)work;
                    XlinkTextureAssociation xlinkTextureAssociation = (XlinkTextureAssociation)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.XLINK_TEXTUREASSOCIATION);
                    if (xlinkTextureAssociation != null)
                        success = xlinkTextureAssociation.insert(textureAssociation);

                    break;
                case TEXTURE_FILE:
                    DBXlinkTextureFile externalFile = (DBXlinkTextureFile)work;
                    XlinkTextureImage xlinkTextureImage = (XlinkTextureImage) xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.TEXTURE_IMAGE);

                    if (xlinkTextureImage != null)
                        xlinkTextureImage.insert(externalFile);

                    // we generate error messages within the modules, so no need for
                    // a global warning
                    success = true;
                    break;
                case SURFACE_DATA_TO_TEX_IMAGE:
                    DBXlinkSurfaceDataToTexImage surfData = (DBXlinkSurfaceDataToTexImage)work;
                    XlinkSurfaceDataToTexImage xlinkSurfData = (XlinkSurfaceDataToTexImage)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.SURFACE_DATA_TO_TEX_IMAGE);
                    if (xlinkSurfData != null)
                        success = xlinkSurfData.insert(surfData);

                    break;
                case LIBRARY_OBJECT:
                    DBXlinkLibraryObject libObject = (DBXlinkLibraryObject)work;
                    XlinkLibraryObject xlinkLibraryObject = (XlinkLibraryObject)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.LIBRARY_OBJECT);
                    if (xlinkLibraryObject != null)
                        success = xlinkLibraryObject.insert(libObject);

                    break;
                case DEPRECATED_MATERIAL:
                    DBXlinkDeprecatedMaterial depMaterial = (DBXlinkDeprecatedMaterial)work;
                    XlinkDeprecatedMaterial xlinkDeprecatedMaterial = (XlinkDeprecatedMaterial)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.XLINK_DEPRECATED_MATERIAL);
                    if (xlinkDeprecatedMaterial != null)
                        success = xlinkDeprecatedMaterial.insert(depMaterial);

                    break;
                case GROUP_TO_CITYOBJECT:
                    DBXlinkGroupToCityObject groupMember = (DBXlinkGroupToCityObject)work;
                    XlinkGroupToCityObject xlinkGroupToCityObject = (XlinkGroupToCityObject)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.GROUP_TO_CITYOBJECT);
                    if (xlinkGroupToCityObject != null)
                        success = xlinkGroupToCityObject.insert(groupMember);

                    break;
                case SOLID_GEOMETRY:
                    DBXlinkSolidGeometry solidGeometry = (DBXlinkSolidGeometry)work;
                    XlinkSolidGeometry xlinkSolidGeometry = (XlinkSolidGeometry)xlinkResolverManager.getDBXlinkResolver(DBXlinkResolverEnum.SOLID_GEOMETRY);
                    if (xlinkSolidGeometry != null)
                        success = xlinkSolidGeometry.insert(solidGeometry);

                    break;
                default:
                    return;
            }

            if (!success) {
                log.error("Failed to resolve XLink reference '" + work.getGmlId() + "'.");
            } else
                updateCounter++;

            // flush and commit once the batch threshold is reached
            if (updateCounter == commitAfter) {
                xlinkResolverManager.executeBatch();
                if (!isManagedTransaction)
                    connection.commit();

                updateCounter = 0;
            }
        } catch (Throwable e) {
            try {
                connection.rollback();
            } catch (SQLException sql) {
                //
            }

            eventDispatcher.triggerSyncEvent(new InterruptEvent("A fatal error occurred during XLink resolving.", LogLevel.WARN, e, eventChannel, this));
        } finally {
            runLock.unlock();
        }
    }

    @Override
    public void handleEvent(Event event) throws Exception {
        // only react to INTERRUPT events raised on this worker's own event channel
        if (event.getChannel() == eventChannel)
            shouldWork = false;
    }
}
| |
/*
* $RCSfile: WBMPCodec.java,v $
*
* Copyright (c) 2005 Sun Microsystems, Inc. All rights reserved.
*
* Use is subject to license terms.
*
* $Revision: 1.2 $
* $Date: 2005/12/14 19:24:54 $
* $State: Exp $
*/
package com.lightcrafts.media.jai.codecimpl;
import java.awt.Point;
import java.awt.image.BufferedImage;
import java.awt.image.DataBuffer;
import java.awt.image.DataBufferByte;
import java.awt.image.IndexColorModel;
import java.awt.image.MultiPixelPackedSampleModel;
import java.awt.image.Raster;
import java.awt.image.RenderedImage;
import java.awt.image.SampleModel;
import java.awt.image.WritableRaster;
import java.io.BufferedInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import com.lightcrafts.media.jai.codec.ForwardSeekableStream;
import com.lightcrafts.media.jai.codec.ImageCodec;
import com.lightcrafts.media.jai.codec.ImageDecoder;
import com.lightcrafts.media.jai.codec.ImageDecoderImpl;
import com.lightcrafts.media.jai.codec.ImageDecodeParam;
import com.lightcrafts.media.jai.codec.ImageEncoder;
import com.lightcrafts.media.jai.codec.ImageEncoderImpl;
import com.lightcrafts.media.jai.codec.ImageEncodeParam;
import com.lightcrafts.media.jai.codec.SeekableStream;
/**
* A subclass of <code>ImageCodec</code> that handles the WBMP format.
*/
/**
 * A subclass of <code>ImageCodec</code> that handles the WBMP (Wireless
 * Bitmap) format: monochrome, byte-packed, one bit per pixel.
 */
public final class WBMPCodec extends ImageCodec {

    public WBMPCodec() {}

    /** Returns the format name, "wbmp". */
    public String getFormatName() {
        return "wbmp";
    }

    public Class getEncodeParamClass() {
        return Object.class;
    }

    public Class getDecodeParamClass() {
        return Object.class;
    }

    /**
     * WBMP can only encode single-band images with one bit per sample and a
     * non-floating-point transfer type.
     */
    public boolean canEncodeImage(RenderedImage im,
                                  ImageEncodeParam param) {
        SampleModel sm = im.getSampleModel();
        int transferType = sm.getTransferType();
        if (transferType == DataBuffer.TYPE_FLOAT ||
            transferType == DataBuffer.TYPE_DOUBLE) {
            return false;
        }
        return sm.getNumBands() == 1 && sm.getSampleSize(0) == 1;
    }

    protected ImageEncoder createImageEncoder(OutputStream dst,
                                              ImageEncodeParam param) {
        return new WBMPImageEncoder(dst, null);
    }

    protected ImageDecoder createImageDecoder(InputStream src,
                                              ImageDecodeParam param) {
        // Wrap the stream for buffered access unless it is already buffered.
        InputStream stream =
            (src instanceof BufferedInputStream) ? src : new BufferedInputStream(src);
        return new WBMPImageDecoder(new ForwardSeekableStream(stream), null);
    }

    protected ImageDecoder createImageDecoder(SeekableStream src,
                                              ImageDecodeParam param) {
        return new WBMPImageDecoder(src, null);
    }

    public int getNumHeaderBytes() {
        return 3;
    }

    /**
     * WBMP has no magic number, so the first three bytes are checked against
     * known constraints instead: TypeField must be 0, FixHeaderField must be 0
     * (extension headers are not supported), and the first width byte must be
     * nonzero. The extra 0x8f mask tolerates output from a buggy Sony Ericsson
     * encoder.
     */
    public boolean isFormatRecognized(byte[] header) {
        if (header[0] != (byte)0 || header[1] != 0) {
            return false;
        }
        return (header[2] & 0x8f) != 0 || (header[2] & 0x7f) != 0;
    }
}
final class WBMPImageEncoder extends ImageEncoderImpl {

    /**
     * Returns the number of significant bits in <code>intValue</code>, i.e.
     * 32 minus the number of leading zero bits (0 for an input of zero).
     */
    private static int getNumBits(int intValue) {
        int numBits = 32;
        int mask = 0x80000000;
        while(mask != 0 && (intValue & mask) == 0) {
            numBits--;
            mask >>>= 1;
        }
        return numBits;
    }

    /**
     * Converts an int value to WBMP multi-byte format: 7 data bits per byte,
     * most significant group first, with the high bit set on every byte except
     * the last as a continuation flag.
     */
    private static byte[] intToMultiByte(int intValue) {
        int numBitsLeft = getNumBits(intValue);
        byte[] multiBytes = new byte[(numBitsLeft + 6)/7];
        int maxIndex = multiBytes.length - 1;
        for(int b = 0; b <= maxIndex; b++) {
            multiBytes[b] = (byte)((intValue >>> ((maxIndex - b)*7))&0x7f);
            if(b != maxIndex) {
                multiBytes[b] |= (byte)0x80;
            }
        }
        return multiBytes;
    }

    public WBMPImageEncoder(OutputStream output,
                            ImageEncodeParam param) {
        super(output, param);
    }

    /**
     * Encodes <code>im</code> to the output stream in WBMP format.
     *
     * @param im the image to encode; must be single-band with one bit per
     *           sample and a non-floating-point data type.
     * @throws IllegalArgumentException if the image cannot be represented
     *         as WBMP.
     * @throws IOException on any write failure.
     */
    public void encode(RenderedImage im) throws IOException {
        // Get the SampleModel.
        SampleModel sm = im.getSampleModel();

        // Check the data type, band count, and sample size.
        int dataType = sm.getTransferType();
        if (dataType == DataBuffer.TYPE_FLOAT ||
            dataType == DataBuffer.TYPE_DOUBLE) {
            throw new IllegalArgumentException(JaiI18N.getString("WBMPImageEncoder0"));
        } else if (sm.getNumBands() != 1) {
            throw new IllegalArgumentException(JaiI18N.getString("WBMPImageEncoder1"));
        } else if (sm.getSampleSize(0) != 1) {
            throw new IllegalArgumentException(JaiI18N.getString("WBMPImageEncoder2"));
        }

        // Save image dimensions.
        int width = im.getWidth();
        int height = im.getHeight();

        // Write WBMP header.
        output.write(0); // TypeField
        output.write(0); // FixHeaderField
        output.write(intToMultiByte(width)); // width
        output.write(intToMultiByte(height)); // height

        Raster tile = null;

        // If the data are not formatted nominally (byte type, byte-packed,
        // zero data bit offset) then reformat into a raster that is.
        if(sm.getDataType() != DataBuffer.TYPE_BYTE ||
           !(sm instanceof MultiPixelPackedSampleModel) ||
           ((MultiPixelPackedSampleModel)sm).getDataBitOffset() != 0) {
            MultiPixelPackedSampleModel mppsm =
                new MultiPixelPackedSampleModel(DataBuffer.TYPE_BYTE,
                                                width, height, 1,
                                                (width + 7)/8, 0);
            WritableRaster raster =
                Raster.createWritableRaster(mppsm,
                                            new Point(im.getMinX(),
                                                      im.getMinY()));
            raster.setRect(im.getData());
            tile = raster;
        } else if(im.getNumXTiles() == 1 &&
                  im.getNumYTiles() == 1) {
            tile = im.getTile(im.getMinTileX(), im.getMinTileY());
        } else {
            tile = im.getData();
        }

        // Check whether the image is white-is-zero.
        boolean isWhiteZero = false;
        if(im.getColorModel() instanceof IndexColorModel) {
            IndexColorModel icm = (IndexColorModel)im.getColorModel();
            isWhiteZero =
                (icm.getRed(0) + icm.getGreen(0) + icm.getBlue(0)) >
                (icm.getRed(1) + icm.getGreen(1) + icm.getBlue(1));
        }

        // Get the line stride, bytes per row, and data array.
        //
        // Bug fix: read the scanline stride from the raster actually being
        // written, not from the source image's SampleModel. The previous code
        // cast 'sm' directly, which threw ClassCastException whenever the
        // reformatting branch above ran because 'sm' was not a
        // MultiPixelPackedSampleModel, and used a stale stride when it was one
        // with a nonzero data bit offset. Every branch above yields a tile
        // backed by a byte-packed MultiPixelPackedSampleModel.
        int lineStride =
            ((MultiPixelPackedSampleModel)tile.getSampleModel()).getScanlineStride();
        int bytesPerRow = (width + 7)/8;
        byte[] bdata = ((DataBufferByte)tile.getDataBuffer()).getData();

        // Write the data.
        if(!isWhiteZero && lineStride == bytesPerRow) {
            // Write the entire image in one shot.
            output.write(bdata, 0, height*bytesPerRow);
        } else {
            // Write the image row-by-row.
            int offset = 0;
            if(!isWhiteZero) {
                // Black-is-zero
                for(int row = 0; row < height; row++) {
                    output.write(bdata, offset, bytesPerRow);
                    offset += lineStride;
                }
            } else {
                // White-is-zero: need to invert data.
                byte[] inverted = new byte[bytesPerRow];
                for(int row = 0; row < height; row++) {
                    for(int col = 0; col < bytesPerRow; col++) {
                        inverted[col] = (byte)(~(bdata[col+offset]));
                    }
                    output.write(inverted, 0, bytesPerRow);
                    offset += lineStride;
                }
            }
        }
    }
}
final class WBMPImageDecoder extends ImageDecoderImpl {

    public WBMPImageDecoder(SeekableStream input,
                            ImageDecodeParam param) {
        super(input, param);
    }

    /**
     * Reads a WBMP multi-byte encoded integer from the input: 7 data bits per
     * byte, most significant group first, high bit set on all bytes except the
     * last.
     *
     * Bug fix: the previous inline loops spun forever on a truncated stream,
     * because read() keeps returning -1 at EOF and (-1 & 0x80) == 0x80, so the
     * continuation test never failed.
     *
     * @throws IOException if the stream ends before the value is complete.
     */
    private int readMultiByteInt() throws IOException {
        int value = input.read();
        if (value < 0) {
            throw new IOException("Unexpected end of stream in WBMP header.");
        }
        int result = value & 0x7f;
        while ((value & 0x80) == 0x80) {
            value = input.read();
            if (value < 0) {
                throw new IOException("Unexpected end of stream in WBMP header.");
            }
            result = (result << 7) | (value & 0x7f);
        }
        return result;
    }

    /**
     * Decodes the single WBMP page as a byte-packed bilevel image.
     *
     * @param page must be 0; WBMP streams contain exactly one image.
     * @throws IOException if page is nonzero or the stream is malformed.
     */
    public RenderedImage decodeAsRenderedImage(int page) throws IOException {
        if (page != 0) {
            // Bug fix: the original applied JaiI18N.getString() twice here,
            // looking up the localized message text as if it were itself a key.
            throw new IOException(JaiI18N.getString("WBMPImageDecoder0"));
        }

        input.read(); // TypeField
        input.read(); // FixHeaderField

        // Image width and height, each in multi-byte format.
        int width = readMultiByteInt();
        int height = readMultiByteInt();

        // Create byte-packed bilevel image with an IndexColorModel.
        BufferedImage bi = new BufferedImage(width,
                                             height,
                                             BufferedImage.TYPE_BYTE_BINARY);

        // Get the image tile.
        WritableRaster tile = bi.getWritableTile(0, 0);

        // Get the SampleModel.
        MultiPixelPackedSampleModel sm =
            (MultiPixelPackedSampleModel)bi.getSampleModel();

        // Read the pixel data directly into the raster's backing array.
        input.readFully(((DataBufferByte)tile.getDataBuffer()).getData(),
                        0, height*sm.getScanlineStride());

        return bi;
    }
}
| |
/*
* Copyright 2015 The SageTV Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sage.media.bluray;
import sage.media.format.ContainerFormat;
import sage.nio.BufferedFileChannel;
import sage.nio.LocalFileChannel;
import sage.nio.RemoteFileChannel;
import sage.nio.SageFileChannel;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.*;
/**
 * Presents the clip files (.m2ts/.MTS segments) of a single BluRay playlist as
 * one continuous, read-only {@link FileChannel}. Positions on this channel
 * address the virtual concatenation of all clips in playlist order; I/O is
 * delegated to an underlying {@link SageFileChannel} that is re-opened on the
 * appropriate clip file as the position crosses clip boundaries. PTS and
 * chapter bookkeeping is kept in 45kHz units (divide by 45 for milliseconds).
 */
public class BluRayFile extends FileChannel implements BluRayStreamer, SageFileChannel
{
  // Underlying buffered channel for the currently open clip (local or remote).
  private final SageFileChannel sageFileChannel;
  // Combined size in bytes of every clip in the selected playlist.
  private final long totalSize;
  private final java.io.File bdmvDir;
  private final sage.media.bluray.BluRayParser bdp;
  private final sage.media.bluray.MPLSObject currPlaylist;
  // Clip files of the selected playlist, in playback order.
  private final java.io.File[] fileSequence;
  // fileOffsets[i] = byte position of clip i within the virtual concatenation.
  private final long[] fileOffsets; // bytes
  // ptsOffsets[i] = amount to add to a clip-local PTS to get a playlist PTS.
  private final long[] ptsOffsets; // 45kHz
  private final long[] chapterOffsets; // 45kHz
  // Index of the clip currently open on sageFileChannel.
  private int currFileIndex;
  private final java.io.File streamDir;
  // Preferred angle index. NOTE(review): never assigned in this class, so it
  // is always 0 — presumably meant to be settable; confirm with callers.
  private int prefAngle;
  // Selected playlist number, 1-based, clamped to the valid range.
  private final int targetTitle;

  /**
   * Create a new <code>BluRayStreamer</code> for a local file.
   *
   * @param bdmvDir The directory containing the bluray.
   * @param directBuffer Use a direct byte buffer?
   * @param targetTitle The title to select first.
   * @param readBufferSize The size to be used for the read buffer.
   * @throws IOException If there is an I/O related error.
   */
  public BluRayFile(java.io.File bdmvDir, boolean directBuffer, int targetTitle, int readBufferSize) throws IOException
  {
    this(null, bdmvDir, directBuffer, targetTitle, readBufferSize);
  }

  /**
   * Create a new <code>BluRayStreamer</code> for a remote file.
   *
   * @param hostname The hostname of the server hosting the bluray content, or
   *                 null for direct local file-system access.
   * @param bdmvDir The directory containing the bluray.
   * @param directBuffer Use a direct byte buffer?
   * @param targetTitle The title to select first; &lt;= 0 selects the main playlist.
   * @param readBufferSize The size to be used for the read buffer.
   * @throws IOException If there is an I/O related error.
   */
  public BluRayFile(String hostname, java.io.File bdmvDir, boolean directBuffer, int targetTitle, int readBufferSize) throws IOException
  {
    if (hostname == null)
      // Creates the local channel; individual clips are opened via openFile().
      sageFileChannel = new BufferedFileChannel(new LocalFileChannel((String)null, true), readBufferSize, directBuffer);
    else
      // Opens a socket, but not any files. We will do that with openFile().
      sageFileChannel = new BufferedFileChannel(new RemoteFileChannel(hostname, (String)null), readBufferSize, directBuffer);

    this.bdmvDir = bdmvDir;
    int targetTitleTmp = targetTitle;
    bdp = new sage.media.bluray.BluRayParser(bdmvDir, hostname);
    bdp.fullyAnalyze();
    // A non-positive title request means "use the main playlist"; the result
    // is then clamped into the valid 1..numPlaylists range.
    if (targetTitleTmp <= 0)
      targetTitleTmp = bdp.getMainPlaylistIndex() + 1;
    this.targetTitle = Math.max(1, Math.min(targetTitleTmp, bdp.getNumPlaylists()));
    currPlaylist = bdp.getPlaylist(this.targetTitle - 1);
    fileSequence = new java.io.File[currPlaylist.playlistItems.length];
    fileOffsets = new long[fileSequence.length];
    ptsOffsets = new long[fileSequence.length];
    streamDir = new java.io.File(bdmvDir, "STREAM");
    // totalPts[i] = cumulative playlist duration (45kHz) through clip i.
    long[] totalPts = new long[fileSequence.length];
    long totalSizeTmp = 0;
    if (hostname == null)
    {
      // This is a little more efficient when we have direct access to the file system.
      for (int i = 0; i < fileSequence.length; i++)
      {
        fileSequence[i] = new java.io.File(streamDir, currPlaylist.playlistItems[i].itemClips[0].clipName + (bdp.doesUseShortFilenames() ? ".MTS" : ".m2ts"));
        fileOffsets[i] = totalSizeTmp;
        // Cumulative duration of the preceding clips minus this clip's inTime:
        // adding this to a clip-local PTS yields the playlist-global PTS.
        ptsOffsets[i] = (i == 0 ? 0 : totalPts[i - 1]) - currPlaylist.playlistItems[i].inTime;
        totalSizeTmp += fileSequence[i].length();
        totalPts[i] = (i == 0 ? 0 : totalPts[i - 1]) + (currPlaylist.playlistItems[i].outTime - currPlaylist.playlistItems[i].inTime);
      }
    }
    else
    {
      for (int i = 0; i < fileSequence.length; i++)
      {
        fileSequence[i] = new java.io.File(streamDir, currPlaylist.playlistItems[i].itemClips[0].clipName + (bdp.doesUseShortFilenames() ? ".MTS" : ".m2ts"));
        fileOffsets[i] = totalSizeTmp;
        ptsOffsets[i] = (i == 0 ? 0 : totalPts[i - 1]) - currPlaylist.playlistItems[i].inTime;
        // Remote access: the clip size must be queried through the channel,
        // so each clip is opened once up front.
        openFile(fileSequence[i].getAbsolutePath());
        totalSizeTmp += sageFileChannel.size();
        totalPts[i] = (i == 0 ? 0 : totalPts[i - 1]) + (currPlaylist.playlistItems[i].outTime - currPlaylist.playlistItems[i].inTime);
      }
    }
    totalSize = totalSizeTmp;
    if (sage.Sage.DBG) System.out.println("Established BluRay file sequence with " + fileSequence.length + " segments and total size=" + totalSizeTmp);
    currFileIndex = 0;
    openFile(fileSequence[currFileIndex].getAbsolutePath());
    // Pre-compute each chapter mark's playlist-global 45kHz timestamp.
    chapterOffsets = new long[currPlaylist.playlistMarks.length];
    for (int i = 0; i < chapterOffsets.length; i++)
    {
      int itemRef = currPlaylist.playlistMarks[i].playItemIdRef;
      chapterOffsets[i] = (itemRef == 0 ? 0 : totalPts[itemRef - 1]) + currPlaylist.playlistMarks[i].timestamp - currPlaylist.playlistItems[itemRef].inTime;
    }
  }

  // Selects the clip containing the channel's current position.
  private void ensureProperFile(boolean forceOpen) throws java.io.IOException
  {
    ensureProperFile(forceOpen, position());
  }

  // pos is the offset as if all of the files were appended one after the other into a bigger file.
  // Switches (and re-opens) the underlying channel to the clip containing pos
  // when pos falls outside the clip currently open, or when forceOpen is set.
  private void ensureProperFile(boolean forceOpen, long pos) throws java.io.IOException
  {
    // Check to see if we need to move to a different file
    if (forceOpen || pos < fileOffsets[currFileIndex] || (currFileIndex < fileOffsets.length - 1 && pos >= fileOffsets[currFileIndex + 1]))
    {
      int oldIndex = currFileIndex;
      // Linear scan for the first clip starting beyond pos; the clip before
      // it is the one containing pos.
      for (currFileIndex = 0; currFileIndex < fileOffsets.length; currFileIndex++)
      {
        if (fileOffsets[currFileIndex] > pos)
          break;
      }
      currFileIndex--;
      if (sage.Sage.DBG) System.out.println("Switching BluRay source file from index " + oldIndex + " to " + currFileIndex);
      // Clamp the preferred angle to the angles actually present for the item.
      int currAngle = Math.min(prefAngle, currPlaylist.playlistItems[currFileIndex].itemClips.length - 1);
      sageFileChannel.openFile(
          new java.io.File(streamDir,
              currPlaylist.playlistItems[currFileIndex].itemClips[currAngle].clipName +
              (bdp.doesUseShortFilenames() ? ".MTS" : ".m2ts")).getAbsolutePath());
    }
  }

  // Position within the currently open clip file.
  // NOTE(review): appears unused anywhere in this class.
  private long realPosition()
  {
    return position() - fileOffsets[currFileIndex];
  }

  @Override
  public long getBytesLeftInClip()
  {
    return (currFileIndex < fileOffsets.length - 1) ? (fileOffsets[currFileIndex + 1] - position()) : (totalSize - position());
  }

  @Override
  public int getCurrClipIndex()
  {
    return currFileIndex;
  }

  @Override
  public long getClipPtsOffset(int index)
  {
    return ptsOffsets[index];
  }

  @Override
  public int getClipIndexForNextRead()
  {
    // If the position sits exactly on a clip boundary, the next read will be
    // served from the following clip.
    if (getBytesLeftInClip() == 0 && currFileIndex < fileOffsets.length - 1)
      return currFileIndex + 1;
    else
      return currFileIndex;
  }

  @Override
  public ContainerFormat getFileFormat()
  {
    return bdp.getFileFormat(targetTitle - 1);
  }

  @Override
  public int getTitle()
  {
    return targetTitle;
  }

  @Override
  public long getChapterStartMsec(int chapter)
  {
    // chapter is 1-based and clamped; 45kHz ticks / 45 = milliseconds.
    return chapterOffsets[Math.max(0, Math.min(chapter - 1, chapterOffsets.length - 1))] / 45;
  }

  @Override
  public int getNumTitles()
  {
    return bdp.getNumPlaylists();
  }

  @Override
  public int getNumChapters()
  {
    return chapterOffsets.length;
  }

  @Override
  public int getChapter(long pts45)
  {
    // Returns the 1-based chapter containing pts45 (index of the first mark
    // beyond it; the last chapter if none is beyond it).
    for (int i = 0; i < chapterOffsets.length; i++)
      if (chapterOffsets[i] > pts45)
        return i;
    return chapterOffsets.length;
  }

  @Override
  public String getTitleDesc(int titleNum)
  {
    return bdp.getPlaylistDesc(titleNum - 1);
  }

  @Override
  public int getNumAngles()
  {
    return currPlaylist.playlistItems[currFileIndex].itemClips.length;
  }

  @Override
  public long transferTo(long position, long count, WritableByteChannel target) throws IOException
  {
    // Because we are looping here, we need to be sure we won't do it endlessly; especially when new
    // data is unlikely to present itself.
    long remaining = size() - position;
    if (remaining < count)
      count = remaining;

    long bytesRead = 0;
    try
    {
      while (bytesRead < count)
      {
        // NOTE(review): the clip is selected from the starting 'position'
        // each iteration rather than position + bytesRead, and a zero return
        // from the underlying transfer would not advance bytesRead — confirm
        // whether transfers that span a clip boundary can occur here.
        ensureProperFile(false, position);
        bytesRead += sageFileChannel.transferTo(position + bytesRead - fileOffsets[currFileIndex], count - bytesRead, target);
      }
    }
    finally
    {
      // Re-sync the open clip with the channel's logical position.
      ensureProperFile(false);
    }

    return bytesRead;
  }

  @Override
  public long transferTo(long count, WritableByteChannel target) throws IOException
  {
    ensureProperFile(false);
    long bytesRead = sageFileChannel.transferTo(count, target);

    // A short transfer may mean we hit the end of the current clip; switch to
    // the next clip and continue.
    if (bytesRead < count)
    {
      ensureProperFile(false);
      bytesRead += sageFileChannel.transferTo(count - bytesRead, target);
    }

    return bytesRead;
  }

  @Override
  public void openFile(String name) throws IOException
  {
    sageFileChannel.openFile(name);
  }

  @Override
  public void openFile(File file) throws IOException
  {
    sageFileChannel.openFile(file);
  }

  /**
   * Unsupported method. This channel can only be read only.
   *
   * @throws NonWritableChannelException always; the channel is read only.
   */
  @Override
  public long transferFrom(ReadableByteChannel src, long position, long count) throws IOException
  {
    throw new NonWritableChannelException();
  }

  /**
   * Unsupported method. This channel can only be read only.
   *
   * @throws NonWritableChannelException always; the channel is read only.
   */
  @Override
  public long transferFrom(ReadableByteChannel src, long count) throws IOException
  {
    throw new NonWritableChannelException();
  }

  /**
   * Unsupported method. This channel can only be read only.
   *
   * @throws NonWritableChannelException always; the channel is read only.
   */
  @Override
  public int write(ByteBuffer src, long position) throws IOException
  {
    throw new NonWritableChannelException();
  }

  /**
   * Unsupported method.
   *
   * @throws IOException always; the method is unsupported.
   */
  @Override
  public MappedByteBuffer map(MapMode mode, long position, long size) throws IOException
  {
    throw new IOException("Method is unsupported.");
  }

  /**
   * Unsupported method.
   *
   * @throws IOException always; the method is unsupported.
   */
  @Override
  public FileLock lock(long position, long size, boolean shared) throws IOException
  {
    throw new IOException("Method is unsupported.");
  }

  /**
   * Unsupported method.
   *
   * @throws IOException always; the method is unsupported.
   */
  @Override
  public FileLock tryLock(long position, long size, boolean shared) throws IOException
  {
    throw new IOException("Method is unsupported.");
  }

  @Override
  public int read(ByteBuffer dst, long position) throws IOException
  {
    int readBytes = 0;
    // Never ask for more than exists past 'position'.
    int count = (int)Math.min(size() - position, (long)dst.remaining());
    try
    {
      while (count > readBytes)
      {
        // NOTE(review): as in transferTo(position, ...), the clip is chosen
        // from the starting 'position' each iteration, and a -1 return from
        // the underlying read would decrement readBytes — confirm behavior
        // for reads spanning a clip boundary.
        ensureProperFile(false, position);
        readBytes += sageFileChannel.read(dst, position + readBytes - fileOffsets[currFileIndex]);
      }
    }
    finally
    {
      // Re-sync the open clip with the channel's logical position.
      ensureProperFile(false);
    }

    return readBytes;
  }

  @Override
  public int read(ByteBuffer dst) throws IOException
  {
    ensureProperFile(false);
    int readBytes = sageFileChannel.read(dst);

    if (dst.hasRemaining())
    {
      // Likely hit the end of the current clip; move to the next one (based
      // on the channel's position) and read the remainder.
      ensureProperFile(false);
      if (readBytes == -1)
        readBytes = 0;
      // NOTE(review): if this second read returns -1 (true end of stream),
      // the -1 is added to a positive readBytes — confirm intended result.
      readBytes += sageFileChannel.read(dst);
    }

    return readBytes;
  }

  @Override
  public int readUnsignedByte() throws IOException
  {
    ensureProperFile(false);
    return sageFileChannel.readUnsignedByte();
  }

  /**
   * Unsupported method. This channel can only be read only.
   *
   * @throws NonWritableChannelException always; the channel is read only.
   */
  @Override
  public int write(ByteBuffer src) throws IOException
  {
    throw new NonWritableChannelException();
  }

  @Override
  public long position()
  {
    // Clip-local position plus the clip's offset in the virtual concatenation.
    return sageFileChannel.position() + fileOffsets[currFileIndex];
  }

  @Override
  public long skip(long n) throws IOException
  {
    if (n < 0)
      return 0;

    long pos = position();
    // Clamp the target to the end of the virtual file.
    long seek = Math.min(pos + n, totalSize);
    position(seek);

    return seek - pos;
  }

  @Override
  public FileChannel position(long newPosition) throws IOException
  {
    ensureProperFile(false, newPosition);
    sageFileChannel.position(newPosition - fileOffsets[currFileIndex]);
    return this;
  }

  @Override
  public long size() throws IOException
  {
    return totalSize;
  }

  /**
   * Unsupported method. This channel can only be read only.
   *
   * @throws NonWritableChannelException always; the channel is read only.
   */
  @Override
  public FileChannel truncate(long size) throws IOException
  {
    throw new NonWritableChannelException();
  }

  /**
   * Unsupported method. This channel can only be read only.
   *
   * @throws NonWritableChannelException always; the channel is read only.
   */
  @Override
  public void force(boolean metaData) throws IOException
  {
    throw new NonWritableChannelException();
  }

  /**
   * A bluray is always read only, so this always returns <code>true</code>.
   *
   * @return <code>true</code>
   */
  @Override
  public boolean isReadOnly()
  {
    return true;
  }

  /**
   * Unsupported method. This channel can only be read only.
   *
   * @throws NonWritableChannelException always; the channel is read only.
   */
  @Override
  public long write(ByteBuffer[] srcs, int offset, int length) throws IOException
  {
    throw new NonWritableChannelException();
  }

  @Override
  public long read(ByteBuffer[] dsts, int offset, int length) throws IOException
  {
    ensureProperFile(false);
    long bytesRead = sageFileChannel.read(dsts, offset, length);

    if (bytesRead == -1)
    {
      // End of the current clip; switch clips and retry once.
      ensureProperFile(false);
      bytesRead = sageFileChannel.read(dsts, offset, length);
    }

    return bytesRead;
  }

  @Override
  protected void implCloseChannel() throws IOException
  {
    sageFileChannel.close();
  }
}
| |
/**
* Copyright (c) 2004-2011 Wang Jinbao(Julian Wong), http://www.ralasafe.com
* Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
*/
/*
* This class was automatically generated with
* <a href="http://www.castor.org">Castor 1.2</a>, using an XML
* Schema.
* $Id: Operand.java,v 1.1 2010/07/09 08:17:05 back Exp $
*/
package org.ralasafe.db.sql.xml;
//---------------------------------/
//- Imported classes and packages -/
//---------------------------------/
import org.exolab.castor.xml.Marshaller;
import org.exolab.castor.xml.Unmarshaller;
/**
* Class Operand.
*
* @version $Revision: 1.1 $ $Date: 2010/07/09 08:17:05 $
*/
/**
 * Castor-generated XML binding for an SQL <code>Operand</code> element: a
 * choice among a column reference, a literal value, and a nested query.
 * Setting any one of the three members also records it as the active choice,
 * retrievable through {@link #getChoiceValue()}.
 *
 * @version $Revision: 1.1 $ $Date: 2010/07/09 08:17:05 $
 */
public class Operand implements java.io.Serializable {

    /** Internal choice value storage: whichever member was set most recently. */
    private java.lang.Object _choiceValue;

    /** The column choice, if selected. */
    private org.ralasafe.db.sql.xml.Column _column;

    /** The literal value choice, if selected. */
    private org.ralasafe.db.sql.xml.Value _value;

    /** The nested query choice, if selected. */
    private org.ralasafe.db.sql.xml.Query _query;

    public Operand() {
        super();
    }

    /**
     * Returns whichever choice member was set most recently, or null if no
     * choice has been made yet.
     *
     * @return the active choice value
     */
    public java.lang.Object getChoiceValue() {
        return _choiceValue;
    }

    /**
     * @return the column choice, or null if it was never set
     */
    public org.ralasafe.db.sql.xml.Column getColumn() {
        return _column;
    }

    /**
     * @return the query choice, or null if it was never set
     */
    public org.ralasafe.db.sql.xml.Query getQuery() {
        return _query;
    }

    /**
     * @return the value choice, or null if it was never set
     */
    public org.ralasafe.db.sql.xml.Value getValue() {
        return _value;
    }

    /**
     * Checks this object against the schema without throwing.
     *
     * @return true if this object is valid according to the schema
     */
    public boolean isValid() {
        try {
            validate();
            return true;
        } catch (org.exolab.castor.xml.ValidationException vex) {
            return false;
        }
    }

    /**
     * Marshals this object as XML to the given writer.
     *
     * @param out the destination writer
     * @throws org.exolab.castor.xml.MarshalException if object is null or if
     *         any SAXException is thrown during marshaling
     * @throws org.exolab.castor.xml.ValidationException if this object is an
     *         invalid instance according to the schema
     */
    public void marshal(final java.io.Writer out)
            throws org.exolab.castor.xml.MarshalException, org.exolab.castor.xml.ValidationException {
        Marshaller.marshal(this, out);
    }

    /**
     * Marshals this object as SAX events to the given content handler.
     *
     * @param handler the destination SAX content handler
     * @throws java.io.IOException if an IOException occurs during marshaling
     * @throws org.exolab.castor.xml.MarshalException if object is null or if
     *         any SAXException is thrown during marshaling
     * @throws org.exolab.castor.xml.ValidationException if this object is an
     *         invalid instance according to the schema
     */
    public void marshal(final org.xml.sax.ContentHandler handler)
            throws java.io.IOException, org.exolab.castor.xml.MarshalException, org.exolab.castor.xml.ValidationException {
        Marshaller.marshal(this, handler);
    }

    /**
     * Selects the column choice.
     *
     * @param column the value of field 'column'
     */
    public void setColumn(final org.ralasafe.db.sql.xml.Column column) {
        _column = column;
        _choiceValue = column;
    }

    /**
     * Selects the query choice.
     *
     * @param query the value of field 'query'
     */
    public void setQuery(final org.ralasafe.db.sql.xml.Query query) {
        _query = query;
        _choiceValue = query;
    }

    /**
     * Selects the value choice.
     *
     * @param value the value of field 'value'
     */
    public void setValue(final org.ralasafe.db.sql.xml.Value value) {
        _value = value;
        _choiceValue = value;
    }

    /**
     * Unmarshals an Operand from XML read off the given reader.
     *
     * @param reader the XML source
     * @throws org.exolab.castor.xml.MarshalException if object is null or if
     *         any SAXException is thrown during marshaling
     * @throws org.exolab.castor.xml.ValidationException if this object is an
     *         invalid instance according to the schema
     * @return the unmarshaled org.ralasafe.db.sql.xml.Operand
     */
    public static org.ralasafe.db.sql.xml.Operand unmarshal(final java.io.Reader reader)
            throws org.exolab.castor.xml.MarshalException, org.exolab.castor.xml.ValidationException {
        return (org.ralasafe.db.sql.xml.Operand) Unmarshaller.unmarshal(org.ralasafe.db.sql.xml.Operand.class, reader);
    }

    /**
     * Validates this object against the schema.
     *
     * @throws org.exolab.castor.xml.ValidationException if this object is an
     *         invalid instance according to the schema
     */
    public void validate() throws org.exolab.castor.xml.ValidationException {
        org.exolab.castor.xml.Validator validator = new org.exolab.castor.xml.Validator();
        validator.validate(this);
    }
}
| |
package abi44_0_0.host.exp.exponent.modules.api.reanimated;
import android.util.SparseArray;
import android.view.View;
import abi44_0_0.com.facebook.react.bridge.Arguments;
import abi44_0_0.com.facebook.react.bridge.Callback;
import abi44_0_0.com.facebook.react.bridge.GuardedRunnable;
import abi44_0_0.com.facebook.react.bridge.JSApplicationIllegalArgumentException;
import abi44_0_0.com.facebook.react.bridge.JavaOnlyMap;
import abi44_0_0.com.facebook.react.bridge.ReactApplicationContext;
import abi44_0_0.com.facebook.react.bridge.ReactContext;
import abi44_0_0.com.facebook.react.bridge.ReadableArray;
import abi44_0_0.com.facebook.react.bridge.ReadableMap;
import abi44_0_0.com.facebook.react.bridge.UiThreadUtil;
import abi44_0_0.com.facebook.react.bridge.WritableMap;
import abi44_0_0.com.facebook.react.modules.core.DeviceEventManagerModule;
import abi44_0_0.com.facebook.react.modules.core.ReactChoreographer;
import abi44_0_0.com.facebook.react.uimanager.GuardedFrameCallback;
import abi44_0_0.com.facebook.react.uimanager.IllegalViewOperationException;
import abi44_0_0.com.facebook.react.uimanager.ReactShadowNode;
import abi44_0_0.com.facebook.react.uimanager.ReactStylesDiffMap;
import abi44_0_0.com.facebook.react.uimanager.UIImplementation;
import abi44_0_0.com.facebook.react.uimanager.UIManagerModule;
import abi44_0_0.com.facebook.react.uimanager.UIManagerReanimatedHelper;
import abi44_0_0.com.facebook.react.uimanager.events.Event;
import abi44_0_0.com.facebook.react.uimanager.events.EventDispatcherListener;
import abi44_0_0.com.facebook.react.uimanager.events.RCTEventEmitter;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.layoutReanimation.AnimationsManager;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.AlwaysNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.BezierNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.BlockNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.CallFuncNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.ClockNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.ClockOpNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.ConcatNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.CondNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.DebugNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.EventNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.FunctionNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.JSCallNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.Node;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.NoopNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.OperatorNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.ParamNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.PropsNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.SetNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.StyleNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.TransformNode;
import abi44_0_0.host.exp.exponent.modules.api.reanimated.nodes.ValueNode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nullable;
public class NodesManager implements EventDispatcherListener {
private static final Double ZERO = Double.valueOf(0);
// Scrolls the native view registered under viewTag. If the tag cannot be
// resolved (e.g. the view was already unmounted) the failure is logged and
// the call is a no-op.
public void scrollTo(int viewTag, double x, double y, boolean animated) {
  View view;
  try {
    view = mUIManager.resolveView(viewTag);
  } catch (IllegalViewOperationException e) {
    e.printStackTrace();
    return;
  }
  NativeMethodsHelper.scrollTo(view, x, y, animated);
}
/**
 * Measures the native view registered under {@code viewTag} via
 * NativeMethodsHelper. Returns an empty array (rather than throwing) when the
 * tag cannot be resolved, e.g. because the view was already unmounted.
 */
public float[] measure(int viewTag) {
  final View resolvedView;
  try {
    resolvedView = mUIManager.resolveView(viewTag);
  } catch (IllegalViewOperationException e) {
    e.printStackTrace();
    return new float[] {};
  }
  return NativeMethodsHelper.measure(resolvedView);
}
/** Callback invoked once per choreographer frame with the frame time in milliseconds. */
public interface OnAnimationFrame {
  void onAnimationFrame(double timestampMs);
}
// Manages layout animations; created in the constructor.
private AnimationsManager mAnimationManager = null;
// All registered animated nodes, keyed by node id.
private final SparseArray<Node> mAnimatedNodes = new SparseArray<>();
// Maps "viewTag + eventName" style keys to their EventNode handlers.
private final Map<String, EventNode> mEventMapping = new HashMap<>();
private final UIImplementation mUIImplementation;
private final DeviceEventManagerModule.RCTDeviceEventEmitter mEventEmitter;
private final ReactChoreographer mReactChoreographer;
private final GuardedFrameCallback mChoreographerCallback;
protected final UIManagerModule.CustomEventNamesResolver mCustomEventNamesResolver;
// True while a frame callback is posted (or, across host pause, while one
// should be re-posted on resume).
private final AtomicBoolean mCallbackPosted = new AtomicBoolean();
private final NoopNode mNoopNode;
private final ReactContext mContext;
private final UIManagerModule mUIManager;
private RCTEventEmitter mCustomEventHandler;
// Callbacks to run on the next animation frame.
private List<OnAnimationFrame> mFrameCallbacks = new ArrayList<>();
// Events captured off the native-modules thread, drained on the UI frame.
private ConcurrentLinkedQueue<CopiedEvent> mEventQueue = new ConcurrentLinkedQueue<>();
private boolean mWantRunUpdates;

public double currentFrameTimeMs;
public final UpdateContext updateContext;
// Prop names routed to the UI-props vs native-props update paths.
public Set<String> uiProps = Collections.emptySet();
public Set<String> nativeProps = Collections.emptySet();

public NativeProxy getNativeProxy() {
  return mNativeProxy;
}

// Created lazily by initWithContext(); null until then and after teardown.
private NativeProxy mNativeProxy;

public AnimationsManager getAnimationsManager() {
  return mAnimationManager;
}
// Tears down native resources when the React (Catalyst) instance is destroyed;
// the native proxy is released and nulled so it cannot be reused.
public void onCatalystInstanceDestroy() {
  if (mAnimationManager != null) {
    mAnimationManager.onCatalystInstanceDestroy();
  }

  if (mNativeProxy != null) {
    mNativeProxy.onCatalystInstanceDestroy();
    mNativeProxy = null;
  }
}
// Creates the native proxy and wires its scheduler into the animations
// manager. Must run after construction (mAnimationManager is set in the
// constructor).
public void initWithContext(ReactApplicationContext reactApplicationContext) {
  mNativeProxy = new NativeProxy(reactApplicationContext);
  mAnimationManager.setScheduler(getNativeProxy().getScheduler());
}
// A pending native-props update for a single view, queued until the next
// performOperations() pass flushes the batch on the native-modules thread.
private final class NativeUpdateOperation {
  public int mViewTag;
  public WritableMap mNativeProps;

  public NativeUpdateOperation(int viewTag, WritableMap nativeProps) {
    mViewTag = viewTag;
    mNativeProps = nativeProps;
  }
}

// Updates accumulated during the current frame, flushed as one batch.
private Queue<NativeUpdateOperation> mOperationsInBatch = new LinkedList<>();
// When set, the next batch may be dispatched synchronously if the UI
// operation queue is empty at flush time.
private boolean mTryRunBatchUpdatesSynchronously = false;
// Builds the node graph manager for the given React context and hooks it into
// the UIManager's event dispatcher and the React choreographer.
public NodesManager(ReactContext context) {
  mContext = context;
  mUIManager = context.getNativeModule(UIManagerModule.class);
  updateContext = new UpdateContext();
  mUIImplementation = mUIManager.getUIImplementation();
  mCustomEventNamesResolver = mUIManager.getDirectEventNamesResolver();
  mEventEmitter = context.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);

  mReactChoreographer = ReactChoreographer.getInstance();
  // Bridges choreographer frames into onAnimationFrame, guarded so exceptions
  // are routed through the React context's exception handler.
  mChoreographerCallback =
      new GuardedFrameCallback(context) {
        @Override
        protected void doFrameGuarded(long frameTimeNanos) {
          onAnimationFrame(frameTimeNanos);
        }
      };

  mNoopNode = new NoopNode(this);

  // We register as event listener at the end, because we pass `this` and we haven't finished
  // constructing an object yet.
  // This lead to a crash described in
  // https://github.com/software-mansion/react-native-reanimated/issues/604 which was caused by
  // Nodes Manager being constructed on UI thread and registering for events.
  // Events are handled in the native modules thread in the `onEventDispatch()` method.
  // This method indirectly uses `mChoreographerCallback` which was created after event
  // registration, creating race condition
  mUIManager.getEventDispatcher().addListener(this);

  mAnimationManager = new AnimationsManager(mContext, mUIImplementation, mUIManager);
}
/**
 * Called when the host activity is paused. Removes the pending frame callback but
 * deliberately re-arms {@code mCallbackPosted} so onHostResume() can tell that
 * updates were in flight and must be restarted.
 */
public void onHostPause() {
  if (mCallbackPosted.get()) {
    stopUpdatingOnAnimationFrame();
    // stopUpdatingOnAnimationFrame() cleared the flag; set it back so that
    // onHostResume() detects the pending callback and re-posts it.
    mCallbackPosted.set(true);
  }
}
/**
 * Called when the host activity resumes. If a frame callback was pending at pause
 * time (flag left set by onHostPause()), clears the flag and re-posts the callback.
 */
public void onHostResume() {
  // compareAndSet(true, false) is equivalent to checking getAndSet(false): the flag
  // ends up false either way and the branch fires only if it was previously true.
  if (mCallbackPosted.compareAndSet(true, false)) {
    startUpdatingOnAnimationFrame();
  }
}
/**
 * Posts the choreographer frame callback unless one is already pending.
 * Safe to call repeatedly; at most one callback is ever scheduled.
 */
public void startUpdatingOnAnimationFrame() {
  // Only the caller that flips the flag false -> true actually posts the callback.
  if (mCallbackPosted.compareAndSet(false, true)) {
    mReactChoreographer.postFrameCallback(
        ReactChoreographer.CallbackType.NATIVE_ANIMATED_MODULE, mChoreographerCallback);
  }
}
/**
 * Removes the choreographer frame callback if one is pending and clears the
 * pending flag. No-op when nothing is scheduled.
 */
private void stopUpdatingOnAnimationFrame() {
  // Only the caller that flips the flag true -> false removes the callback.
  if (mCallbackPosted.compareAndSet(true, false)) {
    mReactChoreographer.removeFrameCallback(
        ReactChoreographer.CallbackType.NATIVE_ANIMATED_MODULE, mChoreographerCallback);
  }
}
/**
 * Flushes the batched native-prop updates to the UI manager on the native modules
 * thread. When a synchronous dispatch was requested for this batch and the native
 * modules operation queue is empty, the calling (UI) thread blocks on a semaphore
 * until the updates have been dispatched; otherwise it is released immediately.
 */
private void performOperations() {
  if (!mOperationsInBatch.isEmpty()) {
    // Swap out the queue so new operations can accumulate while this batch runs.
    final Queue<NativeUpdateOperation> copiedOperationsQueue = mOperationsInBatch;
    mOperationsInBatch = new LinkedList<>();
    final boolean trySynchronously = mTryRunBatchUpdatesSynchronously;
    mTryRunBatchUpdatesSynchronously = false;
    final Semaphore semaphore = new Semaphore(0);
    mContext.runOnNativeModulesQueueThread(
        new GuardedRunnable(mContext.getExceptionHandler()) {
          @Override
          public void runGuarded() {
            boolean queueWasEmpty =
                UIManagerReanimatedHelper.isOperationQueueEmpty(mUIImplementation);
            boolean shouldDispatchUpdates = trySynchronously && queueWasEmpty;
            if (!shouldDispatchUpdates) {
              // Not dispatching synchronously from here; unblock the UI thread early.
              semaphore.release();
            }
            while (!copiedOperationsQueue.isEmpty()) {
              NativeUpdateOperation op = copiedOperationsQueue.remove();
              ReactShadowNode shadowNode = mUIImplementation.resolveShadowNode(op.mViewTag);
              if (shadowNode != null) {
                mUIManager.updateView(op.mViewTag, shadowNode.getViewClass(), op.mNativeProps);
              }
            }
            if (queueWasEmpty) {
              mUIImplementation.dispatchViewUpdates(-1); // no associated batchId
            }
            if (shouldDispatchUpdates) {
              // Updates dispatched; now release the blocked UI thread.
              semaphore.release();
            }
          }
        });
    if (trySynchronously) {
      // Wait (uninterruptibly) until the native modules thread releases the semaphore.
      while (true) {
        try {
          semaphore.acquire();
          break;
        } catch (InterruptedException e) {
          // noop
        }
      }
    }
  }
}
/**
 * Per-frame driver invoked by the choreographer callback: replays queued events,
 * runs one-shot frame callbacks, evaluates the nodes graph when requested, and
 * flushes batched native updates. Re-posts itself only while work remains.
 */
private void onAnimationFrame(long frameTimeNanos) {
  // Choreographer reports nanoseconds; node updates work in milliseconds.
  currentFrameTimeMs = frameTimeNanos / 1000000.;
  while (!mEventQueue.isEmpty()) {
    CopiedEvent copiedEvent = mEventQueue.poll();
    handleEvent(copiedEvent.getTargetTag(), copiedEvent.getEventName(), copiedEvent.getPayload());
  }
  if (!mFrameCallbacks.isEmpty()) {
    // Swap the list first: callbacks may schedule new callbacks for the next frame.
    List<OnAnimationFrame> frameCallbacks = mFrameCallbacks;
    mFrameCallbacks = new ArrayList<>(frameCallbacks.size());
    for (int i = 0, size = frameCallbacks.size(); i < size; i++) {
      frameCallbacks.get(i).onAnimationFrame(currentFrameTimeMs);
    }
  }
  if (mWantRunUpdates) {
    Node.runUpdates(updateContext);
  }
  performOperations();
  mCallbackPosted.set(false);
  mWantRunUpdates = false;
  if (!mFrameCallbacks.isEmpty() || !mEventQueue.isEmpty()) {
    // enqueue next frame
    startUpdatingOnAnimationFrame();
  }
}
/**
 * Null-safe way of getting a node's value. If the node is not present we return 0,
 * matching iOS behavior where the app won't just crash.
 */
public Object getNodeValue(int nodeID) {
  Node node = mAnimatedNodes.get(nodeID);
  return node == null ? ZERO : node.value();
}
/**
 * Null-safe way of getting a node reference; always returns a non-null instance.
 * When the node is missing and a plain {@code Node}/{@code ValueNode} was requested,
 * a shared no-op node is returned instead (it accepts "set" calls and reads as 0).
 *
 * @throws IllegalArgumentException if the node is missing and no no-op substitute
 *     applies, or if the stored node is not of the requested type
 */
public <T extends Node> T findNodeById(int id, Class<T> type) {
  Node node = mAnimatedNodes.get(id);
  if (node == null) {
    if (type != Node.class && type != ValueNode.class) {
      throw new IllegalArgumentException(
          "Requested node with id " + id + " of type " + type + " cannot be found");
    }
    return (T) mNoopNode;
  }
  if (!type.isInstance(node)) {
    throw new IllegalArgumentException(
        "Node with id "
            + id
            + " is of incompatible type "
            + node.getClass()
            + ", requested type was "
            + type);
  }
  return (T) node;
}
/**
 * Creates an animated node from its serialized JS config and registers it under
 * {@code nodeID}.
 *
 * @throws JSApplicationIllegalArgumentException if the id is already taken or the
 *     type string is unrecognized
 */
public void createNode(int nodeID, ReadableMap config) {
  if (mAnimatedNodes.get(nodeID) != null) {
    throw new JSApplicationIllegalArgumentException(
        "Animated node with ID " + nodeID + " already exists");
  }
  String type = config.getString("type");
  if (type == null) {
    // Original equals-chain fell through to the same exception for a null type.
    throw new JSApplicationIllegalArgumentException("Unsupported node type: " + type);
  }
  final Node node;
  switch (type) {
    case "props":
      node = new PropsNode(nodeID, config, this, mUIImplementation);
      break;
    case "style":
      node = new StyleNode(nodeID, config, this);
      break;
    case "transform":
      node = new TransformNode(nodeID, config, this);
      break;
    case "value":
      node = new ValueNode(nodeID, config, this);
      break;
    case "block":
      node = new BlockNode(nodeID, config, this);
      break;
    case "cond":
      node = new CondNode(nodeID, config, this);
      break;
    case "op":
      node = new OperatorNode(nodeID, config, this);
      break;
    case "set":
      node = new SetNode(nodeID, config, this);
      break;
    case "debug":
      node = new DebugNode(nodeID, config, this);
      break;
    case "clock":
      node = new ClockNode(nodeID, config, this);
      break;
    case "clockStart":
      node = new ClockOpNode.ClockStartNode(nodeID, config, this);
      break;
    case "clockStop":
      node = new ClockOpNode.ClockStopNode(nodeID, config, this);
      break;
    case "clockTest":
      node = new ClockOpNode.ClockTestNode(nodeID, config, this);
      break;
    case "call":
      node = new JSCallNode(nodeID, config, this);
      break;
    case "bezier":
      node = new BezierNode(nodeID, config, this);
      break;
    case "event":
      node = new EventNode(nodeID, config, this);
      break;
    case "always":
      node = new AlwaysNode(nodeID, config, this);
      break;
    case "concat":
      node = new ConcatNode(nodeID, config, this);
      break;
    case "param":
      node = new ParamNode(nodeID, config, this);
      break;
    case "func":
      node = new FunctionNode(nodeID, config, this);
      break;
    case "callfunc":
      node = new CallFuncNode(nodeID, config, this);
      break;
    default:
      throw new JSApplicationIllegalArgumentException("Unsupported node type: " + type);
  }
  mAnimatedNodes.put(nodeID, node);
}
/**
 * Unregisters the node with the given tag. The node is notified via onDrop()
 * while it is still registered, and only then removed from the registry.
 */
public void dropNode(int tag) {
  Node node = mAnimatedNodes.get(tag);
  if (node != null) {
    node.onDrop();
  }
  mAnimatedNodes.remove(tag);
}
/**
 * Connects {@code childID} as a child of {@code parentID} in the nodes graph.
 *
 * @throws JSApplicationIllegalArgumentException if either node does not exist
 */
public void connectNodes(int parentID, int childID) {
  Node parentNode = mAnimatedNodes.get(parentID);
  Node childNode = mAnimatedNodes.get(childID);
  if (parentNode == null) {
    // Fix: previously only the child was validated, so a missing parent caused a
    // raw NullPointerException at parentNode.addChild() below.
    throw new JSApplicationIllegalArgumentException(
        "Animated node with ID " + parentID + " does not exists");
  }
  if (childNode == null) {
    throw new JSApplicationIllegalArgumentException(
        "Animated node with ID " + childID + " does not exists");
  }
  parentNode.addChild(childNode);
}
/**
 * Disconnects {@code childID} from its parent {@code parentID} in the nodes graph.
 *
 * @throws JSApplicationIllegalArgumentException if either node does not exist
 */
public void disconnectNodes(int parentID, int childID) {
  Node parentNode = mAnimatedNodes.get(parentID);
  Node childNode = mAnimatedNodes.get(childID);
  if (parentNode == null) {
    // Fix: previously only the child was validated, so a missing parent caused a
    // raw NullPointerException at parentNode.removeChild() below.
    throw new JSApplicationIllegalArgumentException(
        "Animated node with ID " + parentID + " does not exists");
  }
  if (childNode == null) {
    throw new JSApplicationIllegalArgumentException(
        "Animated node with ID " + childID + " does not exists");
  }
  parentNode.removeChild(childNode);
}
/**
 * Connects a {@code PropsNode} to a native view so the node can drive its props.
 *
 * @throws JSApplicationIllegalArgumentException if the node is missing or is not a PropsNode
 */
public void connectNodeToView(int nodeID, int viewTag) {
  Node node = mAnimatedNodes.get(nodeID);
  if (node == null) {
    throw new JSApplicationIllegalArgumentException(
        "Animated node with ID " + nodeID + " does not exists");
  }
  if (!(node instanceof PropsNode)) {
    // Fix: the concatenated message was missing a space ("should beof type").
    throw new JSApplicationIllegalArgumentException(
        "Animated node connected to view should be of type " + PropsNode.class.getName());
  }
  ((PropsNode) node).connectToView(viewTag);
}
/**
 * Disconnects a {@code PropsNode} from the native view it was driving.
 *
 * @throws JSApplicationIllegalArgumentException if the node is missing or is not a PropsNode
 */
public void disconnectNodeFromView(int nodeID, int viewTag) {
  Node node = mAnimatedNodes.get(nodeID);
  if (node == null) {
    throw new JSApplicationIllegalArgumentException(
        "Animated node with ID " + nodeID + " does not exists");
  }
  if (!(node instanceof PropsNode)) {
    // Fix: the concatenated message was missing a space ("should beof type").
    throw new JSApplicationIllegalArgumentException(
        "Animated node connected to view should be of type " + PropsNode.class.getName());
  }
  ((PropsNode) node).disconnectFromView(viewTag);
}
/**
 * Queues a native-props update for {@code viewTag}; the batch is flushed by
 * {@code performOperations()} on the next animation frame.
 *
 * @param trySynchronously when true, marks the whole current batch for a
 *     synchronous dispatch attempt
 */
public void enqueueUpdateViewOnNativeThread(
    int viewTag, WritableMap nativeProps, boolean trySynchronously) {
  // The flag is sticky for the batch: once any caller asks for a synchronous
  // dispatch it stays set until performOperations() consumes it.
  mTryRunBatchUpdatesSynchronously |= trySynchronously;
  mOperationsInBatch.add(new NativeUpdateOperation(viewTag, nativeProps));
}
/**
 * Registers the event node {@code eventNodeID} as the handler for the given
 * (view tag, event name) pair.
 *
 * @throws JSApplicationIllegalArgumentException if the node is missing or a
 *     handler is already registered for that pair
 */
public void attachEvent(int viewTag, String eventName, int eventNodeID) {
  final String key = viewTag + eventName;
  final EventNode node = (EventNode) mAnimatedNodes.get(eventNodeID);
  if (node == null) {
    throw new JSApplicationIllegalArgumentException(
        "Event node " + eventNodeID + " does not exists");
  }
  if (mEventMapping.containsKey(key)) {
    throw new JSApplicationIllegalArgumentException(
        "Event handler already set for the given view and event type");
  }
  mEventMapping.put(key, node);
}
/**
 * Removes the event-node mapping for the given (view tag, event name) pair.
 * {@code eventNodeID} is unused but kept so the signature mirrors attachEvent().
 */
public void detachEvent(int viewTag, String eventName, int eventNodeID) {
  String key = viewTag + eventName;
  mEventMapping.remove(key);
}
/**
 * Stores the sets of prop names treated as native props and UI props respectively;
 * any prop in neither set is routed to JS (see updateProps()).
 */
public void configureProps(Set<String> nativePropsSet, Set<String> uiPropsSet) {
  nativeProps = nativePropsSet;
  uiProps = uiPropsSet;
}
/**
 * Invokes {@code callback} with the current value of node {@code nodeID}.
 * Uses the null-safe {@link #getNodeValue(int)} for consistency: a missing node
 * reports 0 instead of crashing with a NullPointerException, matching the
 * documented iOS behavior.
 */
public void getValue(int nodeID, Callback callback) {
  callback.invoke(getNodeValue(nodeID));
}
/**
 * Requests a nodes-graph evaluation on the next frame and makes sure a frame
 * callback is scheduled to perform it. The flag must be set before scheduling.
 */
public void postRunUpdatesAfterAnimation() {
  mWantRunUpdates = true;
  startUpdatingOnAnimationFrame();
}
/**
 * Schedules a one-shot callback to run on the next animation frame.
 */
public void postOnAnimation(OnAnimationFrame onAnimationFrame) {
  mFrameCallbacks.add(onAnimationFrame);
  startUpdatingOnAnimationFrame();
}
@Override
public void onEventDispatch(Event event) {
  // Events can be dispatched from any thread so we have to make sure handleEvent is run from the
  // UI thread.
  if (UiThreadUtil.isOnUiThread()) {
    handleEvent(event);
    performOperations();
  } else {
    boolean shouldSaveEvent = false;
    String eventName = mCustomEventNamesResolver.resolveCustomEventName(event.getEventName());
    int viewTag = event.getViewTag();
    String key = viewTag + eventName;
    // Only queue the event for replay on the next frame when the native side has a
    // handler waiting for it; otherwise just make sure a frame is scheduled.
    shouldSaveEvent |=
        (mCustomEventHandler != null
            && mNativeProxy != null
            && mNativeProxy.isAnyHandlerWaitingForEvent(key));
    if (shouldSaveEvent) {
      // NOTE(review): the event is copied, presumably because the original object
      // may be recycled before the next frame — confirm against Event's lifecycle.
      mEventQueue.offer(new CopiedEvent(event));
    }
    startUpdatingOnAnimationFrame();
  }
}
/**
 * Delivers an event (already on the UI thread) to the registered custom handler
 * and to any event node attached to the (view tag, event name) pair.
 */
private void handleEvent(Event event) {
  // If the event has a different name in native, convert it to its JS name.
  String eventName = mCustomEventNamesResolver.resolveCustomEventName(event.getEventName());
  String key = event.getViewTag() + eventName;
  if (mCustomEventHandler != null) {
    event.dispatch(mCustomEventHandler);
  }
  EventNode attachedNode = mEventMapping.isEmpty() ? null : mEventMapping.get(key);
  if (attachedNode != null) {
    event.dispatch(attachedNode);
  }
}
/**
 * Delivers a previously-copied event payload to the custom handler and to any
 * event node attached to the (target tag, event name) pair.
 */
private void handleEvent(int targetTag, String eventName, @Nullable WritableMap event) {
  if (mCustomEventHandler != null) {
    mCustomEventHandler.receiveEvent(targetTag, eventName, event);
  }
  if (mEventMapping.isEmpty()) {
    return;
  }
  EventNode attachedNode = mEventMapping.get(targetTag + eventName);
  if (attachedNode != null) {
    attachedNode.receiveEvent(targetTag, eventName, event);
  }
}
/**
 * Returns the resolver that maps native event names to their JS names.
 */
public UIManagerModule.CustomEventNamesResolver getEventNameResolver() {
  return mCustomEventNamesResolver;
}
/**
 * Registers the (single) custom event handler that receives every dispatched event.
 * Replaces any previously registered handler.
 */
public void registerEventHandler(RCTEventEmitter handler) {
  mCustomEventHandler = handler;
}
/**
 * Emits a device event to JS through the RCTDeviceEventEmitter module.
 */
public void sendEvent(String name, WritableMap body) {
  mEventEmitter.emit(name, body);
}
/**
 * Sets the value of the {@code ValueNode} registered under {@code nodeID}.
 * Unknown ids are silently ignored.
 * NOTE(review): a registered node of a different type would still raise a
 * ClassCastException here — confirm callers only pass value-node ids.
 */
public void setValue(int nodeID, Double newValue) {
  Node node = mAnimatedNodes.get(nodeID);
  if (node == null) {
    return;
  }
  ((ValueNode) node).setValue(newValue);
}
/**
 * Routes an updated prop map to the right layer: UI props are applied synchronously
 * on the UI thread, native props are batched for the native modules thread, and
 * anything else is forwarded to JS via an "onReanimatedPropsChange" event.
 */
public void updateProps(int viewTag, Map<String, Object> props) {
  // TODO: update PropsNode to use this method instead of its own way of updating props
  JavaOnlyMap uiPropsMap = new JavaOnlyMap();
  WritableMap jsPropsMap = Arguments.createMap();
  WritableMap nativePropsMap = Arguments.createMap();
  boolean anyUIProp = false;
  boolean anyNativeProp = false;
  boolean anyJSProp = false;
  for (Map.Entry<String, Object> entry : props.entrySet()) {
    String propName = entry.getKey();
    Object propValue = entry.getValue();
    if (uiProps.contains(propName)) {
      anyUIProp = true;
      addProp(uiPropsMap, propName, propValue);
    } else if (nativeProps.contains(propName)) {
      anyNativeProp = true;
      addProp(nativePropsMap, propName, propValue);
    } else {
      anyJSProp = true;
      addProp(jsPropsMap, propName, propValue);
    }
  }
  if (viewTag == View.NO_ID) {
    // No real view attached; nothing to deliver.
    return;
  }
  if (anyUIProp) {
    mUIImplementation.synchronouslyUpdateViewOnUIThread(
        viewTag, new ReactStylesDiffMap(uiPropsMap));
  }
  if (anyNativeProp) {
    enqueueUpdateViewOnNativeThread(viewTag, nativePropsMap, true);
  }
  if (anyJSProp) {
    WritableMap evt = Arguments.createMap();
    evt.putInt("viewTag", viewTag);
    evt.putMap("props", jsPropsMap);
    sendEvent("onReanimatedPropsChange", evt);
  }
}
/**
 * Reads a view prop value as a string; only "opacity" and "zIndex" are supported.
 * An unknown prop name yields an explanatory error string rather than throwing.
 */
public String obtainProp(int viewTag, String propName) {
  View view = mUIManager.resolveView(viewTag);
  switch (propName) {
    case "opacity":
      return Float.toString(view.getAlpha());
    case "zIndex":
      return Float.toString(view.getElevation());
    default:
      return "error: unknown propName " + propName + ", currently supported: opacity, zIndex";
  }
}
/**
 * Copies a single key/value into a {@link WritableMap}, choosing the put* overload
 * matching the runtime type of {@code value}. The instanceof chain is ordered:
 * Double and Integer must be tested before the generic Number fallback.
 *
 * @throws IllegalStateException for unsupported value types
 */
private static void addProp(WritableMap propMap, String key, Object value) {
  if (value == null) {
    propMap.putNull(key);
  } else if (value instanceof Double) {
    propMap.putDouble(key, (Double) value);
  } else if (value instanceof Integer) {
    propMap.putInt(key, (Integer) value);
  } else if (value instanceof Number) {
    // Any other numeric type (Float, Long, ...) is widened to double.
    propMap.putDouble(key, ((Number) value).doubleValue());
  } else if (value instanceof Boolean) {
    propMap.putBoolean(key, (Boolean) value);
  } else if (value instanceof String) {
    propMap.putString(key, (String) value);
  } else if (value instanceof ReadableArray) {
    propMap.putArray(key, (ReadableArray) value);
  } else if (value instanceof ReadableMap) {
    propMap.putMap(key, (ReadableMap) value);
  } else {
    throw new IllegalStateException("Unknown type of animated value");
  }
}
}
| |
/*
* Copyright 2014-2022 Real Logic Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.agent;
import org.agrona.Strings;
import org.agrona.concurrent.UnsafeBuffer;
import org.agrona.concurrent.ringbuffer.ManyToOneRingBuffer;
import java.util.EnumSet;
import java.util.Set;
import java.util.function.Function;
import static io.aeron.agent.DriverEventCode.*;
import static java.lang.System.err;
import static java.lang.System.lineSeparator;
import static org.agrona.BitUtil.CACHE_LINE_LENGTH;
import static org.agrona.BufferUtil.allocateDirectAligned;
import static org.agrona.SystemUtil.getSizeAsInt;
import static org.agrona.concurrent.ringbuffer.RingBufferDescriptor.TRAILER_LENGTH;
/**
 * Common configuration elements between event loggers and event reader side.
 */
final class EventConfiguration
{
    /**
     * Event buffer length system property name.
     */
    public static final String BUFFER_LENGTH_PROP_NAME = "aeron.event.buffer.length";

    /**
     * Event codes for admin events within the driver, i.e. does not include frame capture and name resolution
     * events.
     */
    public static final Set<DriverEventCode> ADMIN_ONLY_EVENT_CODES = EnumSet.complementOf(EnumSet.of(
        FRAME_IN,
        FRAME_OUT,
        NAME_RESOLUTION_NEIGHBOR_ADDED,
        NAME_RESOLUTION_NEIGHBOR_REMOVED));

    /**
     * Event Buffer default length (in bytes).
     */
    public static final int BUFFER_LENGTH_DEFAULT = 8 * 1024 * 1024;

    /**
     * Maximum length of an event in bytes.
     */
    public static final int MAX_EVENT_LENGTH = 4096 - lineSeparator().length();

    /**
     * Iteration limit for event reader loop.
     */
    public static final int EVENT_READER_FRAME_LIMIT = 20;

    /**
     * Ring Buffer to use for logging that will be read by {@link ConfigOption#READER_CLASSNAME}.
     */
    public static final ManyToOneRingBuffer EVENT_RING_BUFFER;

    static
    {
        EVENT_RING_BUFFER = new ManyToOneRingBuffer(new UnsafeBuffer(allocateDirectAligned(
            getSizeAsInt(BUFFER_LENGTH_PROP_NAME, BUFFER_LENGTH_DEFAULT) + TRAILER_LENGTH, CACHE_LINE_LENGTH)));
    }

    /**
     * Driver event codes currently enabled for logging; (re)populated by {@link #init}.
     */
    public static final EnumSet<DriverEventCode> DRIVER_EVENT_CODES = EnumSet.noneOf(DriverEventCode.class);

    /**
     * Archive event codes currently enabled for logging; (re)populated by {@link #init}.
     */
    public static final EnumSet<ArchiveEventCode> ARCHIVE_EVENT_CODES = EnumSet.noneOf(ArchiveEventCode.class);

    /**
     * Cluster event codes currently enabled for logging; (re)populated by {@link #init}.
     */
    public static final EnumSet<ClusterEventCode> CLUSTER_EVENT_CODES = EnumSet.noneOf(ClusterEventCode.class);

    private EventConfiguration()
    {
    }

    /**
     * (Re)populate the enabled event code sets from enabled/disabled expressions. Each argument may be
     * {@code null}/empty, {@code "all"} (drivers also accept {@code "admin"}), or a comma separated list
     * of event code ids or names; disabled codes are removed after enabled codes are added.
     */
    static void init(
        final String enabledDriverEvents,
        final String disabledDriverEvents,
        final String enabledArchiveEvents,
        final String disabledArchiveEvents,
        final String enabledClusterEvents,
        final String disabledClusterEvents)
    {
        DRIVER_EVENT_CODES.clear();
        DRIVER_EVENT_CODES.addAll(getDriverEventCodes(enabledDriverEvents));
        DRIVER_EVENT_CODES.removeAll(getDriverEventCodes(disabledDriverEvents));

        ARCHIVE_EVENT_CODES.clear();
        ARCHIVE_EVENT_CODES.addAll(getArchiveEventCodes(enabledArchiveEvents));
        ARCHIVE_EVENT_CODES.removeAll(getArchiveEventCodes(disabledArchiveEvents));

        CLUSTER_EVENT_CODES.clear();
        CLUSTER_EVENT_CODES.addAll(getClusterEventCodes(enabledClusterEvents));
        CLUSTER_EVENT_CODES.removeAll(getClusterEventCodes(disabledClusterEvents));
    }

    /**
     * Reset configuration back to clear state.
     */
    static void reset()
    {
        DRIVER_EVENT_CODES.clear();
        ARCHIVE_EVENT_CODES.clear();
        CLUSTER_EVENT_CODES.clear();
        EVENT_RING_BUFFER.unblock();
    }

    /**
     * Get the {@link Set} of {@link ArchiveEventCode}s that are enabled for the logger.
     *
     * @param enabledEventCodes that can be "all" or a comma separated list of Event Code ids or names.
     * @return the {@link Set} of {@link ArchiveEventCode}s that are enabled for the logger.
     */
    static EnumSet<ArchiveEventCode> getArchiveEventCodes(final String enabledEventCodes)
    {
        if (Strings.isEmpty(enabledEventCodes))
        {
            return EnumSet.noneOf(ArchiveEventCode.class);
        }

        return parseEventCodes(
            ArchiveEventCode.class, enabledEventCodes, ArchiveEventCode::get, ArchiveEventCode::valueOf);
    }

    /**
     * Get the {@link Set} of {@link ClusterEventCode}s that are enabled for the logger.
     *
     * @param enabledEventCodes that can be "all" or a comma separated list of Event Code ids or names.
     * @return the {@link Set} of {@link ClusterEventCode}s that are enabled for the logger.
     */
    static EnumSet<ClusterEventCode> getClusterEventCodes(final String enabledEventCodes)
    {
        if (Strings.isEmpty(enabledEventCodes))
        {
            return EnumSet.noneOf(ClusterEventCode.class);
        }

        return parseEventCodes(
            ClusterEventCode.class, enabledEventCodes, ClusterEventCode::get, ClusterEventCode::valueOf);
    }

    /**
     * Get the {@link Set} of {@link DriverEventCode}s that are enabled for the logger.
     *
     * @param enabledEventCodes that can be "all", "admin", or a comma separated list of Event Code ids or names.
     * @return the {@link Set} of {@link DriverEventCode}s that are enabled for the logger.
     */
    static EnumSet<DriverEventCode> getDriverEventCodes(final String enabledEventCodes)
    {
        if (Strings.isEmpty(enabledEventCodes))
        {
            return EnumSet.noneOf(DriverEventCode.class);
        }

        final EnumSet<DriverEventCode> eventCodeSet = EnumSet.noneOf(DriverEventCode.class);
        for (final String codeId : enabledEventCodes.split(","))
        {
            switch (codeId)
            {
                case "all":
                    return EnumSet.allOf(DriverEventCode.class);

                case "admin":
                    eventCodeSet.addAll(ADMIN_ONLY_EVENT_CODES);
                    break;

                default:
                {
                    // Shares the name-then-id lookup previously duplicated from parseEventCodes().
                    final DriverEventCode code =
                        lookupEventCode(codeId, DriverEventCode::get, DriverEventCode::valueOf);
                    if (null != code)
                    {
                        eventCodeSet.add(code);
                    }
                    else
                    {
                        err.println("unknown event code: " + codeId);
                    }
                }
            }
        }

        return eventCodeSet;
    }

    private static <E extends Enum<E>> EnumSet<E> parseEventCodes(
        final Class<E> eventCodeType,
        final String enabledEventCodes,
        final Function<Integer, E> eventCodeById,
        final Function<String, E> eventCodeByName)
    {
        final EnumSet<E> eventCodeSet = EnumSet.noneOf(eventCodeType);
        for (final String codeId : enabledEventCodes.split(","))
        {
            if ("all".equals(codeId))
            {
                return EnumSet.allOf(eventCodeType);
            }

            final E code = lookupEventCode(codeId, eventCodeById, eventCodeByName);
            if (null != code)
            {
                eventCodeSet.add(code);
            }
            else
            {
                err.println("unknown event code: " + codeId);
            }
        }

        return eventCodeSet;
    }

    /**
     * Resolve a single event code token: first by enum name, then by numeric id.
     *
     * @return the resolved code or {@code null} when the token matches neither.
     */
    private static <E extends Enum<E>> E lookupEventCode(
        final String codeId, final Function<Integer, E> eventCodeById, final Function<String, E> eventCodeByName)
    {
        E code = null;
        try
        {
            code = eventCodeByName.apply(codeId);
        }
        catch (final IllegalArgumentException ignore)
        {
        }

        if (null == code)
        {
            try
            {
                // NumberFormatException extends IllegalArgumentException, so both a non-numeric
                // token and an out-of-range id land here and leave code as null.
                code = eventCodeById.apply(Integer.parseInt(codeId));
            }
            catch (final IllegalArgumentException ignore)
            {
            }
        }

        return code;
    }
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.fs;
import alluxio.AlluxioURI;
import alluxio.AuthenticatedUserRule;
import alluxio.PropertyKey;
import alluxio.client.WriteType;
import alluxio.client.file.FileSystem;
import alluxio.client.file.options.CreateDirectoryOptions;
import alluxio.client.file.options.CreateFileOptions;
import alluxio.master.file.FileSystemMaster;
import alluxio.master.file.options.CheckConsistencyOptions;
import alluxio.testutils.BaseIntegrationTest;
import alluxio.testutils.LocalAlluxioClusterResource;
import alluxio.underfs.UnderFileSystem;
import alluxio.underfs.options.DeleteOptions;
import com.google.common.collect.Lists;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Integration test for
 * {@link FileSystemMaster#checkConsistency(AlluxioURI, CheckConsistencyOptions)}.
 */
public class CheckConsistencyIntegrationTest extends BaseIntegrationTest {
  private static final AlluxioURI DIRECTORY = new AlluxioURI("/dir");
  private static final AlluxioURI FILE = new AlluxioURI("/dir/file");
  private static final String TEST_USER = "test";

  @Rule
  public LocalAlluxioClusterResource mLocalAlluxioClusterResource =
      new LocalAlluxioClusterResource.Builder().setProperty(PropertyKey.SECURITY_LOGIN_USERNAME,
          TEST_USER).build();

  @Rule
  public AuthenticatedUserRule mAuthenticatedUser = new AuthenticatedUserRule(TEST_USER);

  private FileSystemMaster mFileSystemMaster;
  private FileSystem mFileSystem;

  /**
   * Creates a persisted directory and file ({@code /dir}, {@code /dir/file}) so each test
   * starts from a consistent Alluxio/UFS state.
   */
  @Before
  public final void before() throws Exception {
    mFileSystemMaster =
        mLocalAlluxioClusterResource.get().getLocalAlluxioMaster().getMasterProcess()
            .getMaster(FileSystemMaster.class);
    mFileSystem = FileSystem.Factory.get();
    CreateDirectoryOptions dirOptions =
        CreateDirectoryOptions.defaults().setWriteType(WriteType.CACHE_THROUGH);
    CreateFileOptions fileOptions =
        CreateFileOptions.defaults().setWriteType(WriteType.CACHE_THROUGH);
    mFileSystem.createDirectory(DIRECTORY, dirOptions);
    mFileSystem.createFile(FILE, fileOptions).close();
  }

  /**
   * Tests the {@link FileSystemMaster#checkConsistency(AlluxioURI, CheckConsistencyOptions)} method
   * when all files are consistent.
   */
  @Test
  public void consistent() throws Exception {
    Assert.assertEquals(new ArrayList<AlluxioURI>(), mFileSystemMaster.checkConsistency(
        new AlluxioURI("/"), CheckConsistencyOptions.defaults()));
  }

  /**
   * Tests the {@link FileSystemMaster#checkConsistency(AlluxioURI, CheckConsistencyOptions)} method
   * when no files are consistent.
   */
  @Test
  public void inconsistent() throws Exception {
    deleteUfsDirectory(DIRECTORY);
    assertRootInconsistency(Lists.newArrayList(FILE, DIRECTORY));
  }

  /**
   * Tests the {@link FileSystemMaster#checkConsistency(AlluxioURI, CheckConsistencyOptions)} method
   * when some files are consistent.
   */
  @Test
  public void partiallyInconsistent() throws Exception {
    deleteUfsFile(FILE);
    assertRootInconsistency(Lists.newArrayList(FILE));
  }

  /**
   * Tests the {@link FileSystemMaster#checkConsistency(AlluxioURI, CheckConsistencyOptions)} method
   * when some files are consistent in a larger inode tree.
   */
  @Test
  public void largeTree() throws Exception {
    CreateDirectoryOptions dirOptions =
        CreateDirectoryOptions.defaults().setWriteType(WriteType.CACHE_THROUGH);
    CreateFileOptions fileOptions =
        CreateFileOptions.defaults().setWriteType(WriteType.CACHE_THROUGH);
    AlluxioURI nestedDir = DIRECTORY.join("/dir2");
    AlluxioURI topLevelFile = new AlluxioURI("/file");
    AlluxioURI thirdLevelFile = nestedDir.join("/file");
    mFileSystem.createDirectory(nestedDir, dirOptions);
    mFileSystem.createFile(topLevelFile, fileOptions).close();
    mFileSystem.createFile(thirdLevelFile, fileOptions).close();
    deleteUfsDirectory(nestedDir);
    assertRootInconsistency(Lists.newArrayList(nestedDir, thirdLevelFile));
  }

  /**
   * Tests the {@link FileSystemMaster#checkConsistency(AlluxioURI, CheckConsistencyOptions)} method
   * when a file is not the correct size.
   */
  @Test
  public void incorrectFileSize() throws Exception {
    String ufsFile = deleteUfsFile(FILE);
    UnderFileSystem ufs = UnderFileSystem.Factory.create(ufsFile);
    // Recreate the file in the UFS with different content so only its length mismatches.
    try (OutputStream out = ufs.create(ufsFile)) {
      out.write(1);
    }
    assertRootInconsistency(Lists.newArrayList(FILE));
  }

  /**
   * Tests the {@link FileSystemMaster#checkConsistency(AlluxioURI, CheckConsistencyOptions)} method
   * when a directory does not exist as a directory in the under storage.
   */
  @Test
  public void notADirectory() throws Exception {
    String ufsDirectory = deleteUfsDirectory(DIRECTORY);
    // Replace the directory with a file of the same name in the UFS.
    UnderFileSystem.Factory.create(ufsDirectory).create(ufsDirectory).close();
    assertRootInconsistency(Lists.newArrayList(DIRECTORY, FILE));
  }

  /**
   * Tests the {@link FileSystemMaster#checkConsistency(AlluxioURI, CheckConsistencyOptions)} method
   * when a file does not exist as a file in the under storage.
   */
  @Test
  public void notAFile() throws Exception {
    String ufsFile = deleteUfsFile(FILE);
    // Replace the file with a directory of the same name in the UFS.
    UnderFileSystem.Factory.create(ufsFile).mkdirs(ufsFile);
    assertRootInconsistency(Lists.newArrayList(FILE));
  }

  /**
   * Tests the {@link FileSystemMaster#checkConsistency(AlluxioURI, CheckConsistencyOptions)} method
   * when running on a file that is inconsistent.
   */
  @Test
  public void inconsistentFile() throws Exception {
    deleteUfsFile(FILE);
    List<AlluxioURI> expected = Lists.newArrayList(FILE);
    Assert.assertEquals(expected, mFileSystemMaster
        .checkConsistency(FILE, CheckConsistencyOptions.defaults()));
  }

  /**
   * Recursively deletes the UFS directory backing {@code uri}, making Alluxio inconsistent.
   *
   * @return the UFS path that was deleted
   */
  private String deleteUfsDirectory(AlluxioURI uri) throws Exception {
    String ufsPath = mFileSystem.getStatus(uri).getUfsPath();
    UnderFileSystem.Factory.create(ufsPath)
        .deleteDirectory(ufsPath, DeleteOptions.defaults().setRecursive(true));
    return ufsPath;
  }

  /**
   * Deletes the UFS file backing {@code uri}, making Alluxio inconsistent.
   *
   * @return the UFS path that was deleted
   */
  private String deleteUfsFile(AlluxioURI uri) throws Exception {
    String ufsPath = mFileSystem.getStatus(uri).getUfsPath();
    UnderFileSystem.Factory.create(ufsPath).deleteFile(ufsPath);
    return ufsPath;
  }

  /**
   * Checks the whole namespace and asserts that exactly {@code expected} URIs are reported
   * inconsistent, ignoring ordering (both lists are sorted before comparison).
   */
  private void assertRootInconsistency(List<AlluxioURI> expected) throws Exception {
    List<AlluxioURI> result =
        mFileSystemMaster.checkConsistency(new AlluxioURI("/"), CheckConsistencyOptions.defaults());
    Collections.sort(expected);
    Collections.sort(result);
    Assert.assertEquals(expected, result);
  }
}
| |
package com.laytonsmith.PureUtilities.ClassLoading.ClassMirror;
import com.laytonsmith.PureUtilities.Common.ClassUtils;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* This is the superclass of any element type, such as a field or method.
*/
abstract class AbstractElementMirror implements Serializable {
/**
* Version History:
* 1 - Initial version
* 2 - Parent was added, and it cannot be null. This is an incompatible change, and
* all extensions will need to be recompiled to get the compilation caching benefit.
* (Old caches will fail, and cause a re-scan, but will work.)
*/
private static final long serialVersionUID = 2L;
/**
* Any modifiers on the element
*/
protected final ModifierMirror modifiers;
/**
* The name of the element
*/
protected final String name;
/**
* The type of the element, or in the case of methods or other
* composite types, the return type.
*/
protected final ClassReferenceMirror type;
/**
* Any annotations on the element. This isn't final, because
* the fields and methods are created before they necessarily know their annotations.
*/
protected List<AnnotationMirror> annotations;
/**
* The parent class of the element
*/
private final ClassReferenceMirror parent;
protected AbstractElementMirror(Field field){
Objects.requireNonNull(field);
this.type = ClassReferenceMirror.fromClass(field.getType());
this.modifiers = new ModifierMirror(field.getModifiers());
this.name = field.getName();
List<AnnotationMirror> list = new ArrayList<>();
for(Annotation a : field.getDeclaredAnnotations()){
list.add(new AnnotationMirror(a));
}
this.annotations = list;
this.parent = ClassReferenceMirror.fromClass(field.getDeclaringClass());
Objects.requireNonNull(this.parent);
}
protected AbstractElementMirror(Method method){
Objects.requireNonNull(method);
this.type = ClassReferenceMirror.fromClass(method.getReturnType());
this.modifiers = new ModifierMirror(method.getModifiers());
this.name = method.getName();
List<AnnotationMirror> list = new ArrayList<>();
for(Annotation a : method.getDeclaredAnnotations()){
list.add(new AnnotationMirror(a));
}
this.annotations = list;
this.parent = ClassReferenceMirror.fromClass(method.getDeclaringClass());
Objects.requireNonNull(this.parent);
}
protected AbstractElementMirror(ClassReferenceMirror parent, List<AnnotationMirror> annotations, ModifierMirror modifiers, ClassReferenceMirror type, String name){
this.annotations = annotations;
if(this.annotations == null){
this.annotations = new ArrayList<>();
}
this.modifiers = modifiers;
this.type = type;
this.name = name;
this.parent = parent;
Objects.requireNonNull(parent);
Objects.requireNonNull(modifiers);
Objects.requireNonNull(type);
Objects.requireNonNull(name);
}
/**
* Gets the modifiers on this field/method.
* @return
*/
public ModifierMirror getModifiers(){
return modifiers;
}
/**
* Gets the name of this field/method.
* @return
*/
public String getName(){
return name;
}
/**
* Gets the type of this field/method. For methods, this is
* the return type.
* @return
*/
public ClassReferenceMirror getType(){
return type;
}
/**
* Returns a list of the annotations on this field/method.
* @return
*/
public List<AnnotationMirror> getAnnotations(){
return new ArrayList<>(annotations);
}
/**
 * Gets the mirror of the given annotation type on this field/method.
 *
 * @param annotation the annotation class to look up
 * @return the matching {@link AnnotationMirror}, or {@code null} if the
 *         annotation is not present on this element
 */
public AnnotationMirror getAnnotation(Class<? extends Annotation> annotation){
	String jvmName = ClassUtils.getJVMName(annotation);
	// Iterate the internal list directly; getAnnotations() would allocate
	// a defensive copy on every lookup for no benefit here.
	for(AnnotationMirror a : annotations){
		if(a.getType().getJVMName().equals(jvmName)){
			return a;
		}
	}
	return null;
}
/**
 * Returns true if this element has the specified annotation attached to it.
 *
 * @param annotation the annotation class to test for
 * @return true if present, false otherwise
 */
public boolean hasAnnotation(Class<? extends Annotation> annotation){
	AnnotationMirror found = getAnnotation(annotation);
	return found != null;
}
/**
 * Loads the corresponding Annotation type for this field or method,
 * which actually loads the Annotation class into memory. Equivalent to
 * {@code getAnnotation(type).getProxy(type)}, except a missing
 * annotation yields {@code null} instead of a NullPointerException.
 *
 * @param <T> the annotation type
 * @param type the annotation class to load
 * @return a proxy for the annotation, or {@code null} if not present
 */
public <T extends Annotation> T loadAnnotation(Class<T> type) {
	AnnotationMirror mirror = getAnnotation(type);
	return (mirror == null) ? null : mirror.getProxy(type);
}
/**
 * Returns the class that this is declared in.
 * @return the declaring class reference; never null (enforced at construction)
 */
public final ClassReferenceMirror getDeclaringClass(){
	return this.parent;
}
/* package */ // Appends an annotation to this element's internal list;
/* package */ // intended for use by the enclosing package during mirror construction.
/* package */ void addAnnotation(AnnotationMirror annotation){
	annotations.add(annotation);
}
/**
 * Hash is derived from name and declaring class only, mirroring
 * {@link #equals(Object)}; produces the same value as the previous
 * step-wise formulation.
 */
@Override
public int hashCode() {
	int result = 89 * 5 + Objects.hashCode(this.name);
	result = 89 * result + Objects.hashCode(this.parent);
	return result;
}
/**
 * Two element mirrors are equal when they have the same name and the
 * same declaring class; type, modifiers, and annotations are not
 * compared (consistent with {@link #hashCode()}).
 */
@Override
public boolean equals(Object obj) {
	if (this == obj) {
		// Standard identity fast path; avoids the field comparisons below.
		return true;
	}
	if (obj == null) {
		return false;
	}
	if (getClass() != obj.getClass()) {
		return false;
	}
	final AbstractElementMirror other = (AbstractElementMirror) obj;
	if (!Objects.equals(this.name, other.name)) {
		return false;
	}
	return Objects.equals(this.parent, other.parent);
}
}
| |
package edu.northwestern.bioinformatics.studycalendar.service;
import edu.northwestern.bioinformatics.studycalendar.StudyCalendarSystemException;
import edu.northwestern.bioinformatics.studycalendar.core.Fixtures;
import edu.northwestern.bioinformatics.studycalendar.core.StudyCalendarTestCase;
import edu.northwestern.bioinformatics.studycalendar.dao.SubjectDao;
import edu.northwestern.bioinformatics.studycalendar.domain.Activity;
import edu.northwestern.bioinformatics.studycalendar.domain.BlackoutDate;
import edu.northwestern.bioinformatics.studycalendar.domain.Epoch;
import edu.northwestern.bioinformatics.studycalendar.domain.Gender;
import edu.northwestern.bioinformatics.studycalendar.domain.NextStudySegmentMode;
import edu.northwestern.bioinformatics.studycalendar.domain.Period;
import edu.northwestern.bioinformatics.studycalendar.domain.PlanTreeNode;
import edu.northwestern.bioinformatics.studycalendar.domain.PlannedActivity;
import edu.northwestern.bioinformatics.studycalendar.domain.Population;
import edu.northwestern.bioinformatics.studycalendar.domain.ScheduledActivity;
import edu.northwestern.bioinformatics.studycalendar.domain.ScheduledActivityMode;
import edu.northwestern.bioinformatics.studycalendar.domain.ScheduledActivityState;
import edu.northwestern.bioinformatics.studycalendar.domain.ScheduledCalendar;
import edu.northwestern.bioinformatics.studycalendar.domain.ScheduledStudySegment;
import edu.northwestern.bioinformatics.studycalendar.domain.Site;
import edu.northwestern.bioinformatics.studycalendar.domain.SpecificDateBlackout;
import edu.northwestern.bioinformatics.studycalendar.domain.Study;
import edu.northwestern.bioinformatics.studycalendar.domain.StudySegment;
import edu.northwestern.bioinformatics.studycalendar.domain.StudySite;
import edu.northwestern.bioinformatics.studycalendar.domain.StudySubjectAssignment;
import edu.northwestern.bioinformatics.studycalendar.domain.Subject;
import edu.northwestern.bioinformatics.studycalendar.domain.delta.Amendment;
import edu.northwestern.bioinformatics.studycalendar.domain.delta.AmendmentApproval;
import edu.northwestern.bioinformatics.studycalendar.security.authorization.AuthorizationObjectFactory;
import edu.northwestern.bioinformatics.studycalendar.security.authorization.PscUser;
import edu.northwestern.bioinformatics.studycalendar.service.presenter.Registration;
import edu.nwu.bioinformatics.commons.DateUtils;
import edu.nwu.bioinformatics.commons.testing.CoreTestCase;
import gov.nih.nci.cabig.ctms.lang.DateTools;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import static edu.northwestern.bioinformatics.studycalendar.core.Fixtures.*;
import static edu.nwu.bioinformatics.commons.DateUtils.createDate;
import static java.util.Arrays.asList;
import static java.util.Calendar.*;
import static java.util.Collections.singletonList;
import static org.easymock.classextension.EasyMock.*;
/**
* @author Rhett Sutphin
*/
public class SubjectServiceTest extends StudyCalendarTestCase {
// Object under test.
private SubjectService service;
// Mocked collaborators (EasyMock, registered via the test base class).
private SubjectDao subjectDao;
private AmendmentService amendmentService;
// Shared fixture data built in setUp().
private PscUser user;
private StudySegment studySegment;
private Activity a1;
@Override
protected void setUp() throws Exception {
	super.setUp();
	// Wire the service with mocked DAO and amendment service.
	subjectDao = registerDaoMockFor(SubjectDao.class);
	amendmentService = registerMockFor(AmendmentService.class);
	service = new SubjectService();
	service.setSubjectDao(subjectDao);
	service.setAmendmentService(amendmentService);
	// Template fixture: one epoch, using its first segment with three
	// periods (period args are: name, startDay, duration, repetitions).
	Epoch epoch = Epoch.create("Epoch", "A", "B", "C");
	studySegment = epoch.getStudySegments().get(0);
	Period p1 = createPeriod("P1", 1, 7, 3);
	Period p2 = createPeriod("P2", 3, 1, 1);
	Period p3 = createPeriod("P3", 8, 28, 2);
	studySegment.addPeriod(p1);
	studySegment.addPeriod(p2);
	studySegment.addPeriod(p3);
	a1 = new Activity();
	a1.setId(1);
	a1.setName("CBC");
	// Planned activities; trailing comments list the segment days each
	// repetition lands on.
	// days
	p1.addPlannedActivity(setId(1, createPlannedActivity("CBC", 1, "CBC Details"))); // 1, 8, 15
	p1.addPlannedActivity(setId(2, createPlannedActivity("Vitals", 3, "Vitals Details"))); // 3, 10, 17
	p2.addPlannedActivity(setId(3, createPlannedActivity("Questionnaire", 1, "Questionnaire Details"))); // 3
	p3.addPlannedActivity(setId(4, createPlannedActivity("Infusion", 1, "Infusion Details"))); // 8, 36
	p3.addPlannedActivity(setId(5, createPlannedActivity("Infusion", 18, "Infusion Details"))); // 25, 53
	// CSM user id 67 is asserted against the assignment manager later.
	user = AuthorizationObjectFactory.createPscUser("sc", 67L);
}
// Happy path: assigning a subject to an approved study site creates an
// assignment, schedules the first segment, and records the manager.
public void testAssignSubject() throws Exception {
	Study study = createNamedInstance("Glancing", Study.class);
	Amendment expectedAmendment = new Amendment();
	study.setAmendment(expectedAmendment);
	Site site = createNamedInstance("Lake", Site.class);
	StudySite studySite = createStudySite(study, site);
	// The site must have approved the current amendment for assignment to work.
	studySite.approveAmendment(expectedAmendment, DateTools.createDate(2004, OCTOBER, 18));
	Subject subjectIn = createSubject("Alice", "Childress");
	Date startDate = DateUtils.createDate(2006, OCTOBER, 31);
	String studySubjectId = "SSId1";
	StudySegment expectedStudySegment = Epoch.create("Treatment", "A", "B", "C").getStudySegments().get(1);
	expectedStudySegment.addPeriod(createPeriod("DC", 1, 7, 1));
	expectedStudySegment.getPeriods().iterator().next().addPlannedActivity(createPlannedActivity("Any", 4));
	expect(amendmentService.getAmendedNode(expectedStudySegment, expectedAmendment)).andReturn(expectedStudySegment);
	// Mirror of what the service is expected to persist.
	Subject subjectExpectedSave = createSubject("Alice", "Childress");
	StudySubjectAssignment expectedAssignment = new StudySubjectAssignment();
	expectedAssignment.setStartDate(startDate);
	expectedAssignment.setSubject(subjectExpectedSave);
	expectedAssignment.setStudySite(studySite);
	expectedAssignment.setStudySubjectId(studySubjectId);
	expectedAssignment.setCurrentAmendment(expectedAmendment);
	subjectExpectedSave.addAssignment(expectedAssignment);
	// Saved twice: once for the assignment, once after scheduling.
	subjectDao.save(subjectExpectedSave);
	expectLastCall().times(2);
	replayMocks();
	StudySubjectAssignment actualAssignment = service.assignSubject(
		studySite,
		new Registration.Builder().
			subject(subjectIn).firstStudySegment(expectedStudySegment).date(startDate).
			studySubjectId(studySubjectId).manager(user).
			toRegistration());
	verifyMocks();
	assertNotNull("Assignment not returned", actualAssignment);
	assertEquals("Assignment not added to subject", 1, subjectIn.getAssignments().size());
	assertEquals("Assignment not added to subject", actualAssignment, subjectIn.getAssignments().get(0));
	assertNotNull(actualAssignment.getScheduledCalendar());
	assertEquals(1, actualAssignment.getScheduledCalendar().getScheduledStudySegments().size());
	ScheduledStudySegment scheduledStudySegment = actualAssignment.getScheduledCalendar().getScheduledStudySegments().get(0);
	assertEquals(expectedStudySegment, scheduledStudySegment.getStudySegment());
	assertPositive("No scheduled events", scheduledStudySegment.getActivities().size());
	// 67 is the CSM user id given to `user` in setUp().
	assertEquals("Coordinator not set", 67, (Object) actualAssignment.getManagerCsmUserId());
}
// When the site has only approved an earlier amendment, the new
// assignment must use that approved amendment, not the study's latest.
public void testAssignSubjectRespectsCurrentApprovedAmendment() throws Exception {
	Subject subject = new Subject();
	Study study = createBasicTemplate();
	// Two amendments; the later (2005-06-13) is current on the study.
	study.setAmendment(createAmendments(
		DateTools.createDate(2005, MAY, 12),
		DateTools.createDate(2005, JUNE, 13)
	));
	Site mayo = createNamedInstance("Mayo", Site.class);
	StudySite ss = createStudySite(study, mayo);
	// The site has approved only the earlier one.
	Amendment currentApproved = study.getAmendment().getPreviousAmendment();
	ss.approveAmendment(currentApproved, DateTools.createDate(2005, MAY, 13));
	StudySegment seg = study.getPlannedCalendar().getEpochs().get(0).getStudySegments().get(0);
	seg.addPeriod(createPeriod("P0", 4, 55, 1));
	expect(amendmentService.getAmendedNode(seg, currentApproved)).andReturn(seg);
	subjectDao.save(subject);
	expectLastCall().times(2);
	replayMocks();
	StudySubjectAssignment actual = service.assignSubject(
		ss,
		new Registration.Builder().subject(subject).firstStudySegment(seg).
			date(DateTools.createDate(2006, JANUARY, 11)).
			toRegistration());
	verifyMocks();
	assertSame("Wrong amendment for new assignment", currentApproved, actual.getCurrentAmendment());
}
// Assignment must fail with a descriptive error when the site has not
// approved any amendment of the study's template.
public void testExceptionWhenAssigningASubjectToASiteWithNoApprovedAmendments() throws Exception {
	Subject subject = new Subject();
	Study study = createBasicTemplate();
	study.setAssignedIdentifier("ECOG 2502");
	study.setAmendment(createAmendments(
		DateTools.createDate(2005, MAY, 12),
		DateTools.createDate(2005, JUNE, 13)
	));
	Site mayo = createNamedInstance("Mayo", Site.class);
	// Note: no approveAmendment(...) call — that is the condition under test.
	StudySite ss = createStudySite(study, mayo);
	StudySegment seg = study.getPlannedCalendar().getEpochs().get(0).getStudySegments().get(0);
	seg.addPeriod(createPeriod("P0", 4, 55, 1));
	subjectDao.save(subject);
	expectLastCall().times(2);
	replayMocks();
	try {
		service.assignSubject(
			ss,
			new Registration.Builder().subject(subject).firstStudySegment(seg).
				date(DateTools.createDate(2006, JANUARY, 11)).
				toRegistration());
		fail("Exception not thrown");
	} catch (StudyCalendarSystemException scse) {
		assertEquals("The template for ECOG 2502 has not been approved by Mayo", scse.getMessage());
	}
}
// Scheduling the first segment creates the scheduled calendar and one
// scheduled activity per planned-activity repetition, with correct
// dates, details, and source amendment.
public void testScheduleFirstStudySegment() throws Exception {
	StudySubjectAssignment assignment = new StudySubjectAssignment();
	assignment.setSubject(createSubject("Alice", "Childress"));
	subjectDao.save(assignment.getSubject());
	StudySite studySite = new StudySite();
	studySite.setSite(new Site());
	assignment.setStudySite(studySite);
	Amendment expectedAmendment = new Amendment();
	assignment.setCurrentAmendment(expectedAmendment);
	expect(amendmentService.getAmendedNode(studySegment, expectedAmendment)).andReturn(studySegment);
	replayMocks();
	ScheduledStudySegment returnedStudySegment = service.scheduleStudySegment(
		assignment, studySegment, DateUtils.createDate(2006, APRIL, 1),
		NextStudySegmentMode.PER_PROTOCOL);
	verifyMocks();
	ScheduledCalendar scheduledCalendar = assignment.getScheduledCalendar();
	assertNotNull("Scheduled calendar not created", scheduledCalendar);
	assertEquals("Study segment not added to scheduled study segments", 1, scheduledCalendar.getScheduledStudySegments().size());
	assertSame("Study segment not added to scheduled arms", returnedStudySegment, scheduledCalendar.getScheduledStudySegments().get(0));
	assertSame("Wrong study segment scheduled", studySegment, scheduledCalendar.getScheduledStudySegments().get(0).getStudySegment());
	assertEquals("Wrong start day for scheduled study segment", 1, (int) returnedStudySegment.getStartDay());
	assertDayOfDate("Wrong start date for scheduled study segment", 2006, APRIL, 1, returnedStudySegment.getStartDate());
	List<ScheduledActivity> events = scheduledCalendar.getScheduledStudySegments().get(0).getActivities();
	// 11 = 3 CBC + 3 Vitals + 1 Questionnaire + 2x2 Infusion (see setUp()).
	assertEquals("Wrong number of events added", 11, events.size());
	// NOTE: these locals shadow the `a1` field declared on the class.
	Activity a1 = createNamedInstance("CBC", Activity.class);
	Activity a2 = createNamedInstance("Vitals", Activity.class);
	Activity a3 = createNamedInstance("Questionnaire", Activity.class);
	Activity a4 = createNamedInstance("Infusion", Activity.class);
	assertNewlyScheduledActivity(2006, APRIL, 1, 1, "CBC Details", a1, 0, events.get(0));
	assertNewlyScheduledActivity(2006, APRIL, 3, 2, "Vitals Details", a2, 0, events.get(1));
	assertNewlyScheduledActivity(2006, APRIL, 3, 3, "Questionnaire Details", a3, 0, events.get(2));
	assertNewlyScheduledActivity(2006, APRIL, 8, 1, "CBC Details", a1, 1, events.get(3));
	assertNewlyScheduledActivity(2006, APRIL, 8, 4, "Infusion Details", a4, 0, events.get(4));
	assertNewlyScheduledActivity(2006, APRIL, 10, 2, "Vitals Details", a2, 1, events.get(5));
	assertNewlyScheduledActivity(2006, APRIL, 15, 1, "CBC Details", a1, 2, events.get(6));
	assertNewlyScheduledActivity(2006, APRIL, 17, 2, "Vitals Details", a2, 2, events.get(7));
	assertNewlyScheduledActivity(2006, APRIL, 25, 5, "Infusion Details", a4, 0, events.get(8));
	assertNewlyScheduledActivity(2006, MAY, 6, 4, "Infusion Details", a4, 1, events.get(9));
	assertNewlyScheduledActivity(2006, MAY, 23, 5, "Infusion Details", a4, 1, events.get(10));
	assertSame("Source amendment not set on SEs", expectedAmendment, events.get(7).getSourceAmendment());
}
// A period starting on a negative day shifts its activities before the
// segment start date; the segment start day becomes that negative day.
public void testScheduleFirstStudySegmentWithNegativeDays() throws Exception {
	studySegment.getPeriods().first().setStartDay(-7);
	// this will shift the days for events in the first period:
	// event 1: -7, 0, 7
	// event 2: -5, 2, 9
	StudySubjectAssignment assignment = new StudySubjectAssignment();
	assignment.setSubject(createSubject("Alice", "Childress"));
	subjectDao.save(assignment.getSubject());
	StudySite studySite = new StudySite();
	studySite.setSite(new Site());
	assignment.setStudySite(studySite);
	Amendment expectedAmendment = new Amendment();
	assignment.setCurrentAmendment(expectedAmendment);
	expect(amendmentService.getAmendedNode(studySegment, expectedAmendment)).andReturn(studySegment);
	replayMocks();
	ScheduledStudySegment returnedStudySegment = service.scheduleStudySegment(
		assignment, studySegment, DateUtils.createDate(2006, MARCH, 24), NextStudySegmentMode.PER_PROTOCOL);
	verifyMocks();
	ScheduledCalendar scheduledCalendar = assignment.getScheduledCalendar();
	assertNotNull("Scheduled calendar not created", scheduledCalendar);
	assertEquals("Study segment not added to scheduled study segments", 1, scheduledCalendar.getScheduledStudySegments().size());
	assertSame("Study segment not added to scheduled study segments", returnedStudySegment, scheduledCalendar.getScheduledStudySegments().get(0));
	assertSame("Wrong study segment scheduled", studySegment, scheduledCalendar.getScheduledStudySegments().get(0).getStudySegment());
	List<ScheduledActivity> events = scheduledCalendar.getScheduledStudySegments().get(0).getActivities();
	assertEquals("Wrong number of events added", 11, events.size());
	assertEquals("Wrong start day for study segment", -7, (int) returnedStudySegment.getStartDay());
	// 2006-03-24 corresponds to day -7; day 1 falls on 2006-03-31.
	assertDayOfDate("Wrong start date for study segment", 2006, MARCH, 24, returnedStudySegment.getStartDate());
	assertNewlyScheduledActivity(2006, MARCH, 24, 1, events.get(0));
	assertNewlyScheduledActivity(2006, MARCH, 26, 2, events.get(1));
	assertNewlyScheduledActivity(2006, MARCH, 31, 1, events.get(2));
	assertNewlyScheduledActivity(2006, APRIL, 2, 2, events.get(3));
	assertNewlyScheduledActivity(2006, APRIL, 3, 3, events.get(4));
	assertNewlyScheduledActivity(2006, APRIL, 7, 1, events.get(5));
	assertNewlyScheduledActivity(2006, APRIL, 8, 4, events.get(6));
	assertNewlyScheduledActivity(2006, APRIL, 9, 2, events.get(7));
	assertNewlyScheduledActivity(2006, APRIL, 25, 5, events.get(8));
	assertNewlyScheduledActivity(2006, MAY, 6, 4, events.get(9));
	assertNewlyScheduledActivity(2006, MAY, 23, 5, events.get(10));
}
// If the amendment service cannot resolve the segment within the
// amended template, scheduling must fail loudly.
public void testUnmatchedStudySegmentThrowsException() throws Exception {
	StudySubjectAssignment assignment = new StudySubjectAssignment();
	Amendment expectedAmendment = new Amendment();
	assignment.setCurrentAmendment(expectedAmendment);
	// Simulate "segment not found in amended template".
	expect(amendmentService.getAmendedNode(studySegment, expectedAmendment)).andReturn(null);
	replayMocks();
	try {
		service.scheduleStudySegment(
			assignment, studySegment, DateUtils.createDate(2005, SEPTEMBER, 1), NextStudySegmentMode.IMMEDIATE);
		fail("Exception not thrown");
	} catch (StudyCalendarSystemException scse) {
		assertEquals("Could not find a node " +studySegment +" in the target study", scse.getMessage());
	}
	verifyMocks();
}
// IMMEDIATE mode appends the new segment and cancels any still-scheduled
// activities in existing segments; occurred/canceled ones are untouched.
public void testScheduleImmediateNextStudySegment() throws Exception {
	StudySubjectAssignment assignment = new StudySubjectAssignment();
	ScheduledCalendar calendar = new ScheduledCalendar();
	assignment.setScheduledCalendar(calendar);
	assignment.setSubject(createSubject("Alice", "Childress"));
	Amendment expectedAmendment = new Amendment();
	assignment.setCurrentAmendment(expectedAmendment);
	expect(amendmentService.getAmendedNode(studySegment, expectedAmendment)).andReturn(studySegment);
	// Pre-existing segment with one scheduled, one occurred, one canceled event.
	ScheduledStudySegment existingStudySegment = new ScheduledStudySegment();
	existingStudySegment.addEvent(createScheduledActivity("CBC", 2005, AUGUST, 1));
	existingStudySegment.addEvent(createScheduledActivity("CBC", 2005, AUGUST, 2,
		ScheduledActivityMode.OCCURRED.createStateInstance(DateUtils.createDate(2005, AUGUST, 4), null)));
	existingStudySegment.addEvent(createScheduledActivity("CBC", 2005, AUGUST, 3,
		ScheduledActivityMode.CANCELED.createStateInstance(DateUtils.createDate(2005, AUGUST, 4), null)));
	calendar.addStudySegment(existingStudySegment);
	subjectDao.save(assignment.getSubject());
	StudySite studySite = new StudySite();
	studySite.setSite(new Site());
	assignment.setStudySite(studySite);
	replayMocks();
	ScheduledStudySegment returnedStudySegment = service.scheduleStudySegment(
		assignment, studySegment, DateUtils.createDate(2005, SEPTEMBER, 1), NextStudySegmentMode.IMMEDIATE);
	verifyMocks();
	ScheduledCalendar scheduledCalendar = assignment.getScheduledCalendar();
	assertEquals("Study segment not added to scheduled arms", 2, scheduledCalendar.getScheduledStudySegments().size());
	assertSame("Study segment not added to scheduled arms", returnedStudySegment, scheduledCalendar.getScheduledStudySegments().get(1));
	assertSame("Wrong study segment scheduled", studySegment, scheduledCalendar.getScheduledStudySegments().get(1).getStudySegment());
	List<ScheduledActivity> events = scheduledCalendar.getScheduledStudySegments().get(1).getActivities();
	assertEquals("Wrong number of events added", 11, events.size());
	assertNewlyScheduledActivity(2005, SEPTEMBER, 1, 1, events.get(0));
	assertNewlyScheduledActivity(2005, SEPTEMBER, 3, 2, events.get(1));
	assertNewlyScheduledActivity(2005, SEPTEMBER, 3, 3, events.get(2));
	assertNewlyScheduledActivity(2005, SEPTEMBER, 8, 1, events.get(3));
	assertNewlyScheduledActivity(2005, SEPTEMBER, 8, 4, events.get(4));
	assertNewlyScheduledActivity(2005, SEPTEMBER, 10, 2, events.get(5));
	assertNewlyScheduledActivity(2005, SEPTEMBER, 15, 1, events.get(6));
	assertNewlyScheduledActivity(2005, SEPTEMBER, 17, 2, events.get(7));
	assertNewlyScheduledActivity(2005, SEPTEMBER, 25, 5, events.get(8));
	assertNewlyScheduledActivity(2005, OCTOBER, 6, 4, events.get(9));
	assertNewlyScheduledActivity(2005, OCTOBER, 23, 5, events.get(10));
	// The still-scheduled event must have been canceled by the transition...
	ScheduledActivity wasScheduledActivity = existingStudySegment.getActivities().get(0);
	assertEquals("No new state in scheduled", 2, wasScheduledActivity.getAllStates().size());
	assertEquals("Scheduled event not canceled", ScheduledActivityMode.CANCELED, wasScheduledActivity.getCurrentState().getMode());
	assertEquals("Wrong reason for cancelation", "Immediate transition to Epoch: A", wasScheduledActivity.getCurrentState().getReason());
	// ...while terminal-state events are left alone.
	ScheduledActivity wasOccurredEvent = existingStudySegment.getActivities().get(1);
	assertEquals("Occurred event changed", 2, wasOccurredEvent.getAllStates().size());
	assertEquals("Occurred event changed", ScheduledActivityMode.OCCURRED, wasOccurredEvent.getCurrentState().getMode());
	ScheduledActivity wasCanceledEvent = existingStudySegment.getActivities().get(2);
	assertEquals("Canceled event changed", 2, wasCanceledEvent.getAllStates().size());
	assertEquals("Canceled event changed", ScheduledActivityMode.CANCELED, wasCanceledEvent.getCurrentState().getMode());
}
/**
 * Asserts that a scheduled activity was freshly created from the template:
 * correct planned-activity link, ideal date, SCHEDULED state (with current
 * date equal to the ideal date), and the standard initialization reason.
 */
private void assertNewlyScheduledActivity(
	int expectedYear, int expectedMonth, int expectedDayOfMonth,
	int expectedPlannedActivityId, ScheduledActivity actualEvent
) {
	assertEquals("Wrong associated planned event", expectedPlannedActivityId, (int) actualEvent.getPlannedActivity().getId());
	assertDayOfDate("Wrong ideal date", expectedYear, expectedMonth, expectedDayOfMonth, actualEvent.getIdealDate());
	ScheduledActivityState currentState = actualEvent.getCurrentState();
	assertEquals("Wrong current state mode", ScheduledActivityMode.SCHEDULED, currentState.getMode());
	assertEquals("Current and ideal date not same", actualEvent.getIdealDate(), currentState.getDate());
	assertEquals("Wrong reason", "Initialized from template", currentState.getReason());
}
/**
 * Extended variant of {@code assertNewlyScheduledActivity} that also checks
 * the activity's details, name, and repetition number.
 */
private void assertNewlyScheduledActivity(
	int expectedYear, int expectedMonth, int expectedDayOfMonth,
	int expectedPlannedActivityId, String expectedDetails,
	Activity expectedActivity, int expectedRepetitionNumber,
	ScheduledActivity actualEvent
){
	assertNewlyScheduledActivity(expectedYear, expectedMonth, expectedDayOfMonth, expectedPlannedActivityId, actualEvent);
	assertEquals("Wrong details", expectedDetails, actualEvent.getDetails());
	assertEquals("Wrong repetition number", expectedRepetitionNumber, (int) actualEvent.getRepetitionNumber());
	assertNotNull("No activity", actualEvent.getActivity());
	assertEquals("Wrong activity", expectedActivity.getName(), actualEvent.getActivity().getName());
}
// Typical month: five Wednesdays, first falling mid-week.
public void testFindDaysOfWeekInMonth() throws Exception {
	List<Date> actual = service.findDaysOfWeekInMonth(2007, Calendar.MAY, Calendar.WEDNESDAY);
	assertEquals("There were 5 Wednesdays in May 2007: " + actual, 5, actual.size());
	assertDayOfDate("Wrong 1st Wed.", 2007, Calendar.MAY, 2, actual.get(0));
	assertDayOfDate("Wrong 2nd Wed.", 2007, Calendar.MAY, 9, actual.get(1));
	assertDayOfDate("Wrong 3rd Wed.", 2007, Calendar.MAY, 16, actual.get(2));
	assertDayOfDate("Wrong 4th Wed.", 2007, Calendar.MAY, 23, actual.get(3));
	assertDayOfDate("Wrong 5th Wed.", 2007, Calendar.MAY, 30, actual.get(4));
}
// Boundary case: the requested weekday is the 1st of the month.
public void testFindDaysOfWeekInMonthWithFirstDayMatch() throws Exception {
	List<Date> actual = service.findDaysOfWeekInMonth(1981, Calendar.OCTOBER, Calendar.THURSDAY);
	assertEquals("There were 5 Thursdays in October 1981: " + actual, 5, actual.size());
	assertDayOfDate("Wrong 1st Th.", 1981, Calendar.OCTOBER, 1, actual.get(0));
	assertDayOfDate("Wrong 2nd Th.", 1981, Calendar.OCTOBER, 8, actual.get(1));
	assertDayOfDate("Wrong 3rd Th.", 1981, Calendar.OCTOBER, 15, actual.get(2));
	assertDayOfDate("Wrong 4th Th.", 1981, Calendar.OCTOBER, 22, actual.get(3));
	assertDayOfDate("Wrong 5th Th.", 1981, Calendar.OCTOBER, 29, actual.get(4));
}
// Boundary case: the requested weekday is the last day of the month.
public void testFindDaysOfWeekInMonthWithLastDayMatch() throws Exception {
	List<Date> actual = service.findDaysOfWeekInMonth(1993, Calendar.FEBRUARY, Calendar.SUNDAY);
	assertEquals("There were 4 Sundays in February 1993: " + actual, 4, actual.size());
	assertDayOfDate("Wrong 1st Su.", 1993, Calendar.FEBRUARY, 7, actual.get(0));
	assertDayOfDate("Wrong 2nd Su.", 1993, Calendar.FEBRUARY, 14, actual.get(1));
	assertDayOfDate("Wrong 3rd Su.", 1993, Calendar.FEBRUARY, 21, actual.get(2));
	assertDayOfDate("Wrong 4th Su.", 1993, Calendar.FEBRUARY, 28, actual.get(3));
}
// shiftDayByOne must return a different calendar day; shifting the result
// back by 24h should recover the original day.
// NOTE(review): the 24h round-trip presumably assumes no DST boundary on
// the day of the test run — confirm shiftDayByOne uses calendar arithmetic.
public void testShiftDayByOne() throws Exception {
	Calendar cal = getInstance();
	Date date = cal.getTime();
	DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
	Date newDate = service.shiftDayByOne(date);
	assertNotEquals("dates are equals", df.format(newDate), df.format(date));
	java.sql.Timestamp timestampTo = new java.sql.Timestamp(newDate.getTime());
	// 24 hours in milliseconds.
	long oneDay = 24 * 60 * 60 * 1000;
	timestampTo.setTime(timestampTo.getTime() - oneDay);
	assertEquals("dates are not equals", df.format(timestampTo), df.format(date));
}
// Shifting off a blackout date moves the activity forward one day and
// records a "Rescheduled: <reason>" message.
public void testAvoidingBlackoutDateDelaysSAByOneDayWithReason() throws Exception {
	ScheduledActivity sa = createScheduledActivity("CBC", 2010, Calendar.APRIL, 30);
	service.shiftToAvoidBlackoutDate(sa, createSite("DC"), "Arbor Day");
	assertEquals("Wrong new state", ScheduledActivityMode.SCHEDULED, sa.getCurrentState().getMode());
	assertDayOfDate("Wrong new date", 2010, Calendar.MAY, 1, sa.getCurrentState().getDate());
	assertEquals("Wrong reason", "Rescheduled: Arbor Day", sa.getCurrentState().getReason());
}
// The pre-shift state (with the original date) must remain in the
// activity's state history after the blackout shift.
public void testAvoidingBlackoutDateDelaysPreservesHistory() throws Exception {
	ScheduledActivity sa = createScheduledActivity("CBC", 2010, Calendar.APRIL, 30);
	service.shiftToAvoidBlackoutDate(sa, createSite("DC"), "Arbor Day");
	assertEquals("Wrong number of historical states", 1, sa.getPreviousStates().size());
	assertDayOfDate("Old date not preserved",
		2010, Calendar.APRIL, 30, sa.getPreviousStates().get(0).getDate());
}
// A CONDITIONAL activity shifted off a blackout date stays CONDITIONAL
// rather than reverting to SCHEDULED.
public void testShiftConditionalActivityPreservesConditionalness() throws Exception {
	ScheduledActivity sa = createConditionalEvent("CBC", 2010, Calendar.APRIL, 30);
	service.shiftToAvoidBlackoutDate(sa, createSite("DC"), "Arbor Day");
	assertEquals(ScheduledActivityMode.CONDITIONAL, sa.getCurrentState().getMode());
}
// Activities landing on site blackout dates (Aug 1-3, 2005) must be
// pushed to the next available day (Aug 4); later repetitions are
// unaffected.
public void testAvoidWeekendsAndHolidays() throws Exception {
	StudySubjectAssignment assignment = new StudySubjectAssignment();
	ScheduledCalendar scheduledCalendar = new ScheduledCalendar();
	assignment.setScheduledCalendar(scheduledCalendar);
	Epoch epoch = Epoch.create("Epoch", "A", "B", "C");
	StudySegment scheduledStudySegment = epoch.getStudySegments().get(0);
	Period p1 = createPeriod("P1", 1, 7, 3);
	scheduledStudySegment.addPeriod(p1);
	p1.addPlannedActivity(setId(1, createPlannedActivity("CBC", 1)));
	Amendment expectedAmendment = new Amendment();
	assignment.setCurrentAmendment(expectedAmendment);
	expect(amendmentService.getAmendedNode(scheduledStudySegment, expectedAmendment))
		.andReturn(scheduledStudySegment);
	StudySite studySite = new StudySite();
	Site site = new Site();
	// Three consecutive blackout days covering the first repetition's date.
	SpecificDateBlackout holidayOne = new SpecificDateBlackout();
	holidayOne.setDay(1);
	holidayOne.setMonth(AUGUST);
	holidayOne.setYear(2005);
	SpecificDateBlackout holidayTwo = new SpecificDateBlackout();
	holidayTwo.setDay(2);
	holidayTwo.setMonth(AUGUST);
	holidayTwo.setYear(2005);
	SpecificDateBlackout holidayThree = new SpecificDateBlackout();
	holidayThree.setDay(3);
	holidayThree.setMonth(AUGUST);
	holidayThree.setYear(2005);
	List<BlackoutDate> listOfHolidays = new ArrayList<BlackoutDate>();
	listOfHolidays.add(holidayOne);
	listOfHolidays.add(holidayTwo);
	listOfHolidays.add(holidayThree);
	site.setBlackoutDates(listOfHolidays);
	studySite.setSite(site);
	assignment.setStudySite(studySite);
	assignment.setSubject(createSubject("Alice", "Childress"));
	subjectDao.save(assignment.getSubject());
	replayMocks();
	ScheduledStudySegment returnedStudySegment = service.scheduleStudySegment(
		assignment, scheduledStudySegment, DateUtils.createDate(2005, AUGUST , 1),
		NextStudySegmentMode.PER_PROTOCOL);
	verifyMocks();
	List<ScheduledActivity> events = returnedStudySegment.getActivities();
	assertNotNull("Scheduled calendar not created", scheduledCalendar);
	assertEquals("Study segment not added to scheduled study segments", 1, scheduledCalendar.getScheduledStudySegments().size());
	assertSame("Study segment not added to scheduled study segments", returnedStudySegment, scheduledCalendar.getScheduledStudySegments().get(0));
	assertEquals("Wrong number of events added", 3, events.size());
	Calendar calendar = getInstance();
	calendar.setTime(events.get(0).getActualDate());
	// BUG FIX: assertEquals takes (message, expected, actual) — the
	// expected value goes first, and the year check gets its own message.
	assertEquals("Date is not reset ", 4, calendar.get(DAY_OF_MONTH));
	assertEquals("Month is not reset ", AUGUST, calendar.get(MONTH));
	assertEquals("Year is not reset ", 2005, calendar.get(YEAR));
	assertNewlyScheduledActivity(2005, AUGUST, 8, 1, events.get(1));
	assertNewlyScheduledActivity(2005, AUGUST, 15, 1, events.get(2));
}
// Taking a subject off study sets the end date and, for activities after
// that date, cancels SCHEDULED ones and marks CONDITIONAL ones N/A;
// terminal states (occurred/canceled) and earlier activities are untouched.
public void testTakeSubjectOffStudy() throws Exception {
	Date startDate = DateUtils.createDate(2007, AUGUST, 31);
	Date expectedEndDate = DateUtils.createDate(2007, SEPTEMBER, 4);
	StudySubjectAssignment expectedAssignment = setId(1, new StudySubjectAssignment());
	expectedAssignment.setStartDate(startDate);
	// Segment 0: mix of occurred/canceled/scheduled around the end date.
	ScheduledStudySegment studySegment0 = new ScheduledStudySegment();
	studySegment0.addEvent(createScheduledActivityWithStudy("ABC", 2007, SEPTEMBER, 2, ScheduledActivityMode.OCCURRED.createStateInstance()));
	studySegment0.addEvent(createScheduledActivityWithStudy("DEF", 2007, SEPTEMBER, 4, ScheduledActivityMode.CANCELED.createStateInstance()));
	studySegment0.addEvent(createScheduledActivityWithStudy("GHI", 2007, SEPTEMBER, 6, ScheduledActivityMode.OCCURRED.createStateInstance()));
	studySegment0.addEvent(createScheduledActivityWithStudy("JKL", 2007, SEPTEMBER, 8, ScheduledActivityMode.SCHEDULED.createStateInstance()));
	// Segment 1: everything after the end date, plus one conditional event.
	ScheduledStudySegment studySegment1 = new ScheduledStudySegment();
	studySegment1.addEvent(createScheduledActivityWithStudy("MNO", 2007, OCTOBER, 2, ScheduledActivityMode.OCCURRED.createStateInstance()));
	studySegment1.addEvent(createScheduledActivityWithStudy("PQR", 2007, OCTOBER, 4, ScheduledActivityMode.SCHEDULED.createStateInstance()));
	studySegment1.addEvent(createScheduledActivityWithStudy("STU", 2007, OCTOBER, 6, ScheduledActivityMode.SCHEDULED.createStateInstance()));
	studySegment1.addEvent(createScheduledActivityWithStudy("VWX", 2007, OCTOBER, 8, ScheduledActivityMode.SCHEDULED.createStateInstance()));
	studySegment1.addEvent(createConditionalEventWithStudy("YZA", 2007, OCTOBER, 10));
	ScheduledCalendar calendar = new ScheduledCalendar();
	calendar.setAssignment(expectedAssignment);
	calendar.addStudySegment(studySegment0);
	calendar.addStudySegment(studySegment1);
	expectedAssignment.setScheduledCalendar(calendar);
	subjectDao.save(expectedAssignment.getSubject());
	replayMocks();
	StudySubjectAssignment actualAssignment = service.takeSubjectOffStudy(expectedAssignment, expectedEndDate);
	verifyMocks();
	CoreTestCase.assertDayOfDate("Wrong off study day", 2007, SEPTEMBER, 4, actualAssignment.getEndDate());
	assertEquals("Wrong Event Mode", ScheduledActivityMode.OCCURRED, studySegment0.getActivities().get(2).getCurrentState().getMode());
	assertEquals("Wrong Event Mode", ScheduledActivityMode.CANCELED, studySegment0.getActivities().get(3).getCurrentState().getMode());
	assertEquals("Wrong Event Mode", ScheduledActivityMode.OCCURRED, studySegment1.getActivities().get(0).getCurrentState().getMode());
	assertEquals("Wrong Event Mode", ScheduledActivityMode.CANCELED, studySegment1.getActivities().get(1).getCurrentState().getMode());
	assertEquals("Wrong Event Mode", ScheduledActivityMode.CANCELED, studySegment1.getActivities().get(2).getCurrentState().getMode());
	assertEquals("Wrong Event Mode", ScheduledActivityMode.CANCELED, studySegment1.getActivities().get(3).getCurrentState().getMode());
	assertEquals("Wrong Event Mode", ScheduledActivityMode.NOT_APPLICABLE, studySegment1.getActivities().get(4).getCurrentState().getMode());
}
// A subject who is already off study (end date set) must not get any new
// schedule: no calendar is created and the service returns null.
public void testScheduleStudySegmentWithOffStudySubject() {
	StudySubjectAssignment assignment = new StudySubjectAssignment();
	assignment.setSubject(createSubject("Alice", "Childress"));
	// End date set => subject is off study.
	assignment.setEndDate(DateUtils.createDate(2006, APRIL, 1));
	StudySite studySite = new StudySite();
	studySite.setSite(new Site());
	assignment.setStudySite(studySite);
	replayMocks();
	ScheduledStudySegment returnedStudySegment = service.scheduleStudySegment(
		assignment, studySegment, DateUtils.createDate(2006, APRIL, 1),
		NextStudySegmentMode.PER_PROTOCOL);
	verifyMocks();
	ScheduledCalendar scheduledCalendar = assignment.getScheduledCalendar();
	// BUG FIX: the original messages stated the opposite of the assertion
	// (e.g. "Scheduled calendar not created" on an assertNull), and
	// assertSame(null, x) is clearer as assertNull.
	assertNull("Scheduled calendar should not be created for off-study subject", scheduledCalendar);
	assertNull("No study segment should be scheduled for off-study subject", returnedStudySegment);
}
// The subject belongs to the activity's population, so exactly one activity
// should be scheduled onto the segment.
public void testSchedulePlannedEventWithPopulationWhenSubjectIsInPopulation() throws Exception {
    PlannedActivity activity = Fixtures.createPlannedActivity("elph", 4);
    activity.setPopulation(createNamedInstance("H+", Population.class));
    Period dcPeriod = Fixtures.createPeriod("DC", 2, 7, 1);
    StudySubjectAssignment subjectAssignment = new StudySubjectAssignment();
    subjectAssignment.addPopulation(activity.getPopulation());
    ScheduledStudySegment scheduled = createScheduledStudySegment(subjectAssignment);

    service.schedulePlannedActivity(activity, dcPeriod, new Amendment(),
        "Initialized from template", scheduled);

    assertEquals("Wrong number of activites scheduled", 1, scheduled.getActivities().size());
    assertSame("Wrong activity scheduled, somehow", activity,
        scheduled.getActivities().get(0).getPlannedActivity());
}
// Scheduling a population-scoped activity for a subject who is NOT in that
// population should schedule nothing.
public void testSchedulePlannedEventWithPopulationWhenSubjectIsInNotPopulation() throws Exception {
    // Consistency fix: use the in-scope Fixtures like the sibling tests,
    // instead of the fully qualified class name.
    PlannedActivity plannedActivity = Fixtures.createPlannedActivity("elph", 4);
    plannedActivity.setPopulation(createNamedInstance("H+", Population.class));
    Period period = Fixtures.createPeriod("DC", 2, 7, 1);
    StudySubjectAssignment assignment = new StudySubjectAssignment();
    // Subject is in a different population than the planned activity.
    assignment.addPopulation(createNamedInstance("Different population", Population.class));
    ScheduledStudySegment segment = createScheduledStudySegment(assignment);

    service.schedulePlannedActivity(plannedActivity, period, new Amendment(), "DC", segment);

    // Message typo fixed ("activites" -> "activities").
    assertEquals("No activities should have been scheduled", 0, segment.getActivities().size());
}
// A label attached to all repetitions is copied to every scheduled activity;
// a label attached to repetition 0 only goes to the first scheduled activity.
public void testSchedulePlannedActivityCopiesLabelsForCorrectRepetitionOnly() throws Exception {
    PlannedActivity activity = Fixtures.createPlannedActivity("elph", 4);
    labelPlannedActivity(activity, "all");
    labelPlannedActivity(activity, 0, "zero");
    Period twoRepPeriod = Fixtures.createPeriod("DC", 2, 7, 2);
    StudySubjectAssignment subjectAssignment = new StudySubjectAssignment();
    ScheduledStudySegment scheduled = createScheduledStudySegment(subjectAssignment);

    service.schedulePlannedActivity(activity, twoRepPeriod, new Amendment(),
        "Initialized from template", scheduled);

    assertEquals("Wrong number of activites scheduled", 2, scheduled.getActivities().size());
    assertEquals("Wrong number of labels for SA 0", 2, scheduled.getActivities().get(0).getLabels().size());
    assertEquals("Wrong first label for SA 0", "all", scheduled.getActivities().get(0).getLabels().first());
    assertEquals("Wrong second label for SA 0", "zero", scheduled.getActivities().get(0).getLabels().last());
    assertEquals("Wrong number of labels for SA 1", 1, scheduled.getActivities().get(1).getLabels().size());
    assertEquals("Wrong label for SA 1", "all", scheduled.getActivities().get(1).getLabels().first());
}
// Builds a minimal scheduled study segment (day 1, starting now) attached to a
// calendar for the given assignment.
private ScheduledStudySegment createScheduledStudySegment(StudySubjectAssignment assignment) {
    ScheduledCalendar calendar = new ScheduledCalendar();
    calendar.setAssignment(assignment);
    ScheduledStudySegment scheduled = new ScheduledStudySegment();
    scheduled.setScheduledCalendar(calendar);
    scheduled.setStartDay(1);
    scheduled.setStartDate(new Date());
    return scheduled;
}
// When every attribute is populated, findSubject resolves via the person id.
public void testFindSubjectWithAllAttributes() {
    Subject expected = createSubject("1111", "john", "doe", createDate(1990, Calendar.JANUARY, 15, 0, 0, 0), Gender.MALE);
    expectFindSubjectByPersonId("1111", expected);

    replayMocks();
    Subject found = service.findSubject(expected);
    verifyMocks();

    assertSame("Subjects should be the same", expected, found);
}
// With only a person id available, findSubject resolves via the person id.
public void testReadElementByPersonId() {
    Subject expected = createSubject("1111", null, null, null, Gender.MALE);
    expectFindSubjectByPersonId("1111", expected);

    replayMocks();
    Subject found = service.findSubject(expected);
    verifyMocks();

    assertSame("Subjects should be the same", expected, found);
}
// Without a person id, findSubject falls back to first name + last name + DoB.
public void testReadElementByFirstNameLastNameAndBirthDate() {
    Subject expected = createSubject(null, "john", "doe", createDate(1990, Calendar.JANUARY, 15, 0, 0, 0), Gender.MALE);
    expectFindSubjectByFirstNameLastNameAndBirthDate("john", "doe", createDate(1990, Calendar.JANUARY, 15, 0, 0, 0), expected);

    replayMocks();
    Subject found = service.findSubject(expected);
    verifyMocks();

    assertSame("Subjects should be the same", expected, found);
}
// When no subject matches by name and date of birth, findSubject should pass
// the DAO's "not found" result through as null.
public void testFindSubjectByNameAndDoBWhenNotPresent() throws Exception {
Subject subject = createSubject(null, "john", "doe", createDate(1990, Calendar.JANUARY, 15, 0, 0, 0), Gender.MALE);
// NOTE: the DAO returns null here rather than an empty list; the service
// must tolerate that.
expect(subjectDao.findSubjectByFirstNameLastNameAndDoB(subject.getFirstName(), subject.getLastName(), subject.getDateOfBirth()))
.andReturn(null); // this is the actual behavior of SubjectDao
replayMocks();
Subject actual = service.findSubject(subject);
verifyMocks();
assertNull("Subject should not be found", actual);
}
// End-to-end check: assigning a subject whose population set includes the
// activity's population results in that activity actually being scheduled.
public void testAssignSubjectWithPopulationScopedActivity() {
Population population = createPopulation("Male", "M");
Study study = createSingleEpochStudy("Study A", "Epoch A", "Segment A");
study.addPopulation(population);
StudySegment segment = study.getPlannedCalendar().getEpochs().get(0).getStudySegments().get(0);
Period period = createPeriod("Period A", 1, 2, 1);
segment.addPeriod(period);
// The single planned activity is scoped to the "Male" population.
PlannedActivity plannedActivity = createPlannedActivity(a1, 1);
plannedActivity.setPopulation(population);
period.addPlannedActivity(plannedActivity);
Subject subject = createSubject("Bernie", "Mac");
Site site = createSite("NU");
StudySite studySite = createStudySite(study, site);
// The study site must have approved the study's current amendment for the
// assignment to be schedulable.
Amendment amendment = new Amendment();
study.setAmendment(amendment);
AmendmentApproval amendmentApproval = new AmendmentApproval();
amendmentApproval.setAmendment(amendment);
studySite.addAmendmentApproval(amendmentApproval);
expect(amendmentService.getAmendedNode((PlanTreeNode) notNull(), (Amendment) notNull())).andReturn(segment);
// Two save expectations: the subject is persisted twice during assignment.
subjectDao.save((Subject) notNull());
subjectDao.save((Subject) notNull());
replayMocks();
StudySubjectAssignment actual = service.assignSubject(
studySite,
new Registration.Builder().subject(subject).firstStudySegment(segment).
date(createDate(1990, Calendar.JANUARY, 15, 0, 0, 0)).
studySubjectId("123").populations(new HashSet<Population>(asList(population))).
manager(user).
toRegistration());
verifyMocks();
// Because the subject is in the population, the first scheduled segment
// must contain at least one activity.
assertFalse(actual.getScheduledCalendar().getScheduledStudySegments().get(0).getActivities().isEmpty());
}
////// Expect Methods
// Registers a mock expectation: a person-id lookup returns the given subject.
private void expectFindSubjectByPersonId(String id, Subject returned) {
expect(subjectDao.findSubjectByPersonId(id)).andReturn(returned);
}
// Registers a mock expectation: a name + DoB lookup returns a single-element
// list containing the given subject (this DAO query returns a List).
private void expectFindSubjectByFirstNameLastNameAndBirthDate(String firstName, String lastName, Date birthDate, Subject returned) {
expect(subjectDao.findSubjectByFirstNameLastNameAndDoB(firstName, lastName, birthDate)).andReturn(singletonList(returned));
}
}
| |
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.forex.method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.Validate;
import com.opengamma.analytics.financial.forex.derivative.ForexOptionVanilla;
import com.opengamma.analytics.financial.interestrate.InstrumentDerivative;
import com.opengamma.analytics.financial.interestrate.InterestRateCurveSensitivity;
import com.opengamma.analytics.financial.interestrate.YieldCurveBundle;
import com.opengamma.analytics.financial.model.option.definition.SmileDeltaTermStructureDataBundle;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.BlackFunctionData;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.BlackPriceFunction;
import com.opengamma.analytics.financial.model.volatility.BlackFormulaRepository;
import com.opengamma.analytics.financial.model.volatility.VolatilityAndBucketedSensitivities;
import com.opengamma.analytics.financial.model.volatility.surface.SmileDeltaTermStructureParametersStrikeInterpolation;
import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.analytics.math.matrix.DoubleMatrix1D;
import com.opengamma.analytics.math.matrix.DoubleMatrix2D;
import com.opengamma.analytics.util.surface.SurfaceValue;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.CurrencyAmount;
import com.opengamma.util.money.MultipleCurrencyAmount;
import com.opengamma.util.tuple.DoublesPair;
/**
* Pricing method for vanilla Forex option transactions with Black function and a volatility provider.
* OG-Implementation: Vanilla Forex options: Garman-Kohlhagen and risk reversal/strangle, version 1.5, May 2012.
*/
public final class ForexOptionVanillaBlackSmileMethod implements ForexPricingMethod {
/**
 * The method unique instance, created eagerly at class load.
 */
private static final ForexOptionVanillaBlackSmileMethod INSTANCE = new ForexOptionVanillaBlackSmileMethod();
/**
 * Return the unique instance of the class.
 * @return The instance.
 */
public static ForexOptionVanillaBlackSmileMethod getInstance() {
return INSTANCE;
}
/**
 * Private constructor: instances are only obtained via {@link #getInstance()}.
 */
private ForexOptionVanillaBlackSmileMethod() {
}
/**
 * The Black function used in the pricing (shared; used for prices and adjoints).
 */
private static final BlackPriceFunction BLACK_FUNCTION = new BlackPriceFunction();
/**
 * Computes the present value of the vanilla option using the Black formula, with the volatility
 * read from the smile term structure at the option's expiry and strike.
 * @param optionForex The Forex option.
 * @param smile The curve and smile data.
 * @return The present value. The value is in the domestic currency (currency 2).
 */
public MultipleCurrencyAmount presentValue(final ForexOptionVanilla optionForex, final SmileDeltaTermStructureDataBundle smile) {
    Validate.notNull(optionForex, "Forex option");
    Validate.notNull(smile, "Smile");
    Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
    final double paymentTime = optionForex.getUnderlyingForex().getPaymentTime();
    final double domesticDf = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(paymentTime);
    final double foreignDf = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(paymentTime);
    final double spotRate = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
    // Covered interest parity: forward = spot * P_foreign(T) / P_domestic(T).
    final double forwardRate = spotRate * foreignDf / domesticDf;
    final double vol = FXVolatilityUtils.getVolatility(smile, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forwardRate);
    final Function1D<BlackFunctionData, Double> priceFunction = BLACK_FUNCTION.getPriceFunction(optionForex);
    final double unitPrice = priceFunction.evaluate(new BlackFunctionData(forwardRate, domesticDf, vol));
    // Scale by the absolute foreign notional; flip the sign for a short position.
    final double pv = unitPrice * Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount()) * (optionForex.isLong() ? 1.0 : -1.0);
    return MultipleCurrencyAmount.of(CurrencyAmount.of(optionForex.getUnderlyingForex().getCurrency2(), pv));
}
/**
 * {@inheritDoc} Requires a vanilla Forex option and smile data; delegates to the typed overload.
 */
@Override
public MultipleCurrencyAmount presentValue(final InstrumentDerivative instrument, final YieldCurveBundle curves) {
    Validate.isTrue(instrument instanceof ForexOptionVanilla, "Vanilla Forex option");
    Validate.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Smile delta data bundle required");
    final ForexOptionVanilla option = (ForexOptionVanilla) instrument;
    final SmileDeltaTermStructureDataBundle smileData = (SmileDeltaTermStructureDataBundle) curves;
    return presentValue(option, smileData);
}
/**
 * Computes the implied Black volatility of the vanilla option, read from the smile term structure
 * at the option's expiry and strike with the forward implied by the curves.
 * @param optionForex The Forex option.
 * @param curves The yield curve bundle; must be a {@link SmileDeltaTermStructureDataBundle}.
 * @return The implied volatility.
 */
public double impliedVolatility(final ForexOptionVanilla optionForex, final YieldCurveBundle curves) {
    ArgumentChecker.notNull(curves, "Curves");
    ArgumentChecker.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Yield curve bundle should contain smile data");
    final SmileDeltaTermStructureDataBundle smile = (SmileDeltaTermStructureDataBundle) curves;
    Validate.notNull(optionForex, "Forex option");
    Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
    final double dfDomestic = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
    final double dfForeign = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
    final double spot = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
    // Covered interest parity forward.
    final double forward = spot * dfForeign / dfDomestic;
    // Return directly; the intermediate local in the original added nothing.
    return FXVolatilityUtils.getVolatility(smile, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forward);
}
/**
 * Computes the currency exposure of the vanilla option with the Black function and a volatility from a volatility surface. The exposure is computed in both option currencies.
 * @param optionForex The Forex option.
 * @param smile The curve and smile data.
 * @return The currency exposure
 */
public MultipleCurrencyAmount currencyExposure(final ForexOptionVanilla optionForex, final SmileDeltaTermStructureDataBundle smile) {
Validate.notNull(optionForex, "Forex option");
Validate.notNull(smile, "Smile");
Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
final double dfDomestic = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
final double dfForeign = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
final double spot = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
// Covered interest parity: forward = spot * dfForeign / dfDomestic.
final double forward = spot * dfForeign / dfDomestic;
final double volatility = FXVolatilityUtils.getVolatility(smile, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forward);
final BlackFunctionData dataBlack = new BlackFunctionData(forward, dfDomestic, volatility);
// priceAdjoint[0] is the unit price; priceAdjoint[1] appears to be the
// derivative w.r.t. the forward (hence the chain-rule factor below) --
// confirm against BlackPriceFunction.getPriceAdjoint.
final double[] priceAdjoint = BLACK_FUNCTION.getPriceAdjoint(optionForex, dataBlack);
final double sign = (optionForex.isLong() ? 1.0 : -1.0);
final double price = priceAdjoint[0] * Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount()) * sign;
// Chain rule: dForward/dSpot = dfForeign / dfDomestic.
final double deltaSpot = priceAdjoint[1] * dfForeign / dfDomestic;
final CurrencyAmount[] currencyExposure = new CurrencyAmount[2];
// Implementation note: foreign currency (currency 1) exposure = Delta_spot * amount1.
currencyExposure[0] = CurrencyAmount.of(optionForex.getUnderlyingForex().getCurrency1(), deltaSpot * Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount()) * sign);
// Implementation note: domestic currency (currency 2) exposure = -Delta_spot * amount1 * spot+PV
currencyExposure[1] = CurrencyAmount.of(optionForex.getUnderlyingForex().getCurrency2(), -deltaSpot * Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount()) * spot * sign
+ price);
return MultipleCurrencyAmount.of(currencyExposure);
}
/**
 * {@inheritDoc} Requires a vanilla Forex option and smile data; delegates to the typed overload.
 */
@Override
public MultipleCurrencyAmount currencyExposure(final InstrumentDerivative instrument, final YieldCurveBundle curves) {
    Validate.isTrue(instrument instanceof ForexOptionVanilla, "Vanilla Forex option");
    Validate.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Smile delta data bundle required");
    final ForexOptionVanilla option = (ForexOptionVanilla) instrument;
    final SmileDeltaTermStructureDataBundle smileData = (SmileDeltaTermStructureDataBundle) curves;
    return currencyExposure(option, smileData);
}
/**
 * Computes the relative delta of the Forex option. The relative delta is the amount in the foreign currency equivalent to the option up to the first order divided by the option notional.
 * @param optionForex The Forex option.
 * @param smile The curve and smile data.
 * @param directQuote Flag indicating if the delta should be computed with respect to the direct quote (1 foreign = x domestic) or the reverse quote (1 domestic = x foreign)
 * @return The delta.
 */
public double deltaRelative(final ForexOptionVanilla optionForex, final SmileDeltaTermStructureDataBundle smile, final boolean directQuote) {
    Validate.notNull(optionForex, "Forex option");
    Validate.notNull(smile, "Smile");
    Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
    final double dfDomestic = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
    final double dfForeign = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
    final double spot = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
    // Covered interest parity forward.
    final double forward = spot * dfForeign / dfDomestic;
    final double volatility = FXVolatilityUtils.getVolatility(smile, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forward);
    final double sign = (optionForex.isLong() ? 1.0 : -1.0);
    final double deltaDirect = BlackFormulaRepository.delta(forward, optionForex.getStrike(), optionForex.getTimeToExpiry(), volatility, optionForex.isCall()) * dfForeign * sign;
    if (directQuote) {
        return deltaDirect;
    }
    // Reverse quote S' = 1/S: dV/dS' = dV/dS * dS/dS' = -deltaDirect * S^2.
    return -deltaDirect * spot * spot;
}
/**
 * Computes the relative delta of the Forex option multiplied by the spot rate.
 * The relative delta is the amount in the foreign currency equivalent to the option up to the first order divided by the option notional.
 * The reason to multiply by the spot rate is to be able to compute the change of value for a relative increase of e of the spot rate (from X to X(1+e)).
 * @param optionForex The Forex option.
 * @param smile The curve and smile data.
 * @param directQuote Flag indicating if the delta should be computed with respect to the direct quote (1 foreign = x domestic) or the reverse quote (1 domestic = x foreign)
 * @return The delta.
 */
public double deltaRelativeSpot(final ForexOptionVanilla optionForex, final SmileDeltaTermStructureDataBundle smile, final boolean directQuote) {
    Validate.notNull(optionForex, "Forex option");
    Validate.notNull(smile, "Smile");
    Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
    final double dfDomestic = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
    final double dfForeign = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
    final double spot = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
    // Covered interest parity forward.
    final double forward = spot * dfForeign / dfDomestic;
    final double volatility = FXVolatilityUtils.getVolatility(smile, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forward);
    final double sign = (optionForex.isLong() ? 1.0 : -1.0);
    final double deltaDirect = BlackFormulaRepository.delta(forward, optionForex.getStrike(), optionForex.getTimeToExpiry(), volatility, optionForex.isCall()) * dfForeign * sign;
    if (directQuote) {
        return deltaDirect * spot;
    }
    // Reverse quote: -deltaDirect * spot^2, divided by spot for the relative bump.
    return -deltaDirect * spot;
}
/**
 * Computes the relative gamma of the Forex option.
 * The relative gamma is the second order derivative of the pv divided by the option notional.
 * @param optionForex The Forex option.
 * @param smile The curve and smile data.
 * @param directQuote Flag indicating if the gamma should be computed with respect to the direct quote (1 foreign = x domestic) or the reverse quote (1 domestic = x foreign)
 * @return The gamma.
 */
public double gammaRelative(final ForexOptionVanilla optionForex, final SmileDeltaTermStructureDataBundle smile, final boolean directQuote) {
Validate.notNull(optionForex, "Forex option");
Validate.notNull(smile, "Smile");
Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
final double dfDomestic = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
final double dfForeign = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
final double spot = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
// Covered interest parity: forward = spot * dfForeign / dfDomestic.
final double forward = spot * dfForeign / dfDomestic;
final double volatility = FXVolatilityUtils.getVolatility(smile, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), volatility == volatility ? forward : forward);
final double sign = (optionForex.isLong() ? 1.0 : -1.0);
final double gammaDirect = BlackFormulaRepository.gamma(forward, optionForex.getStrike(), optionForex.getTimeToExpiry(), volatility) * (dfForeign * dfForeign) / dfDomestic * sign;
if (directQuote) {
return gammaDirect;
}
final double deltaDirect = BlackFormulaRepository.delta(forward, optionForex.getStrike(), optionForex.getTimeToExpiry(), volatility, optionForex.isCall()) * dfForeign * sign;
// Reverse quote S' = 1/S: second derivative by the chain rule,
// d2V/dS'^2 = gammaDirect * S^4 + 2 * deltaDirect * S^3.
final double gamma = (gammaDirect * spot + 2 * deltaDirect) * spot * spot * spot;
return gamma;
}
/**
 * Computes the relative gamma of the Forex option multiplied by the spot rate.
 * The relative gamma is the second order derivative of the pv divided by the option notional.
 * The reason to multiply by the spot rate is to be able to compute the change of delta for a relative increase of e of the spot rate (from X to X(1+e)).
 * @param optionForex The Forex option.
 * @param smile The curve and smile data.
 * @param directQuote Flag indicating if the gamma should be computed with respect to the direct quote (1 foreign = x domestic) or the reverse quote (1 domestic = x foreign)
 * @return The gamma.
 */
public double gammaRelativeSpot(final ForexOptionVanilla optionForex, final SmileDeltaTermStructureDataBundle smile, final boolean directQuote) {
Validate.notNull(optionForex, "Forex option");
Validate.notNull(smile, "Smile");
Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
final double dfDomestic = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
final double dfForeign = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
final double spot = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
// Covered interest parity: forward = spot * dfForeign / dfDomestic.
final double forward = spot * dfForeign / dfDomestic;
final double volatility = FXVolatilityUtils.getVolatility(smile, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forward);
final double sign = (optionForex.isLong() ? 1.0 : -1.0);
final double gammaDirect = BlackFormulaRepository.gamma(forward, optionForex.getStrike(), optionForex.getTimeToExpiry(), volatility) * (dfForeign * dfForeign) / dfDomestic * sign;
if (directQuote) {
return gammaDirect * spot;
}
final double deltaDirect = BlackFormulaRepository.delta(forward, optionForex.getStrike(), optionForex.getTimeToExpiry(), volatility, optionForex.isCall()) * dfForeign * sign;
// Reverse-quote gamma (see gammaRelative) divided by spot for the relative bump.
final double gamma = (gammaDirect * spot + 2 * deltaDirect) * spot * spot;
return gamma;
}
/**
 * Computes the gamma of the Forex option: the second order derivative of the option present value
 * to the spot fx rate, scaled by the (absolute) foreign notional.
 * @param optionForex The Forex option.
 * @param curves The yield curve bundle; must contain smile data.
 * @param directQuote Flag indicating if the gamma should be computed with respect to the direct quote (1 foreign = x domestic) or the reverse quote (1 domestic = x foreign)
 * @return The gamma, in the domestic currency.
 */
public CurrencyAmount gamma(final ForexOptionVanilla optionForex, final YieldCurveBundle curves, final boolean directQuote) {
    ArgumentChecker.notNull(curves, "Curves");
    ArgumentChecker.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Yield curve bundle should contain smile data");
    final SmileDeltaTermStructureDataBundle smileData = (SmileDeltaTermStructureDataBundle) curves;
    final double relativeGamma = gammaRelative(optionForex, smileData, directQuote);
    final double foreignNotional = Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount());
    return CurrencyAmount.of(optionForex.getUnderlyingForex().getCurrency2(), relativeGamma * foreignNotional);
}
/**
 * Computes the gamma of the Forex option multiplied by the spot rate, scaled by the (absolute)
 * foreign notional. Multiplying by spot gives the change of delta for a relative spot bump
 * (from X to X(1+e)).
 * @param optionForex The Forex option.
 * @param curves The yield curve bundle; must contain smile data.
 * @param directQuote Flag indicating if the gamma should be computed with respect to the direct quote (1 foreign = x domestic) or the reverse quote (1 domestic = x foreign)
 * @return The gamma, in the domestic currency.
 */
public CurrencyAmount gammaSpot(final ForexOptionVanilla optionForex, final YieldCurveBundle curves, final boolean directQuote) {
    ArgumentChecker.notNull(curves, "Curves");
    ArgumentChecker.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Yield curve bundle should contain smile data");
    final SmileDeltaTermStructureDataBundle smileData = (SmileDeltaTermStructureDataBundle) curves;
    final double relativeGammaSpot = gammaRelativeSpot(optionForex, smileData, directQuote);
    final double foreignNotional = Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount());
    return CurrencyAmount.of(optionForex.getUnderlyingForex().getCurrency2(), relativeGammaSpot * foreignNotional);
}
/**
 * Computes the Theta (derivative with respect to the time) using the forward driftless theta in the Black formula. The theta is not scaled.
 * Reference on driftless theta: The complete guide to Option Pricing Formula (2007), E. G. Haug, Mc Graw Hill, p. 67, equation (2.43)
 * @param optionForex The Forex option.
 * @param curves The yield curve bundle; must contain smile data.
 * @return The theta. In the same currency as present value.
 */
public CurrencyAmount thetaTheoretical(final ForexOptionVanilla optionForex, final YieldCurveBundle curves) {
    ArgumentChecker.notNull(curves, "Curves");
    ArgumentChecker.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Yield curve bundle should contain smile data");
    final SmileDeltaTermStructureDataBundle smileData = (SmileDeltaTermStructureDataBundle) curves;
    final double payTime = optionForex.getUnderlyingForex().getPaymentTime();
    final double domesticDf = smileData.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(payTime);
    final double foreignDf = smileData.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(payTime);
    final double spotRate = smileData.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
    // Covered interest parity forward.
    final double forwardRate = spotRate * foreignDf / domesticDf;
    final double vol = FXVolatilityUtils.getVolatility(smileData, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forwardRate);
    final double longShort = optionForex.isLong() ? 1.0 : -1.0;
    final double foreignNotional = Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount());
    final double theta = BlackFormulaRepository.theta(forwardRate, optionForex.getStrike(), optionForex.getTimeToExpiry(), vol) * longShort * foreignNotional;
    return CurrencyAmount.of(optionForex.getUnderlyingForex().getCurrency2(), theta);
}
/**
 * Computes the Vanna (2nd order cross-sensitivity of the option present value to the spot fx and implied vol),
 *
 * $\frac{\partial^2 (PV)}{\partial FX \partial \sigma}$
 * @param optionForex The Forex option.
 * @param curves The yield curve bundle; must contain smile data.
 * @return The Vanna. In the same currency as present value.
 */
public CurrencyAmount vanna(final ForexOptionVanilla optionForex, final YieldCurveBundle curves) {
    ArgumentChecker.notNull(curves, "Curves");
    ArgumentChecker.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Yield curve bundle should contain smile data");
    final SmileDeltaTermStructureDataBundle smileData = (SmileDeltaTermStructureDataBundle) curves;
    final double payTime = optionForex.getUnderlyingForex().getPaymentTime();
    final double domesticDf = smileData.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(payTime);
    final double foreignDf = smileData.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(payTime);
    final double spotRate = smileData.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
    // Covered interest parity forward.
    final double forwardRate = spotRate * foreignDf / domesticDf;
    final double vol = FXVolatilityUtils.getVolatility(smileData, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forwardRate);
    final double longShort = optionForex.isLong() ? 1.0 : -1.0;
    final double foreignNotional = Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount());
    final double vanna = foreignDf * BlackFormulaRepository.vanna(forwardRate, optionForex.getStrike(), optionForex.getTimeToExpiry(), vol)
        * longShort * foreignNotional;
    return CurrencyAmount.of(optionForex.getUnderlyingForex().getCurrency2(), vanna);
}
/**
 * Computes the Vomma (aka Volga) (2nd order sensitivity of the option present value to the implied vol)
 * @param optionForex The Forex option.
 * @param curves The yield curve bundle; must contain smile data.
 * @return The Volga. In the same currency as present value.
 */
public CurrencyAmount vomma(final ForexOptionVanilla optionForex, final YieldCurveBundle curves) {
    ArgumentChecker.notNull(curves, "Curves");
    ArgumentChecker.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Yield curve bundle should contain smile data");
    final SmileDeltaTermStructureDataBundle smileData = (SmileDeltaTermStructureDataBundle) curves;
    final double payTime = optionForex.getUnderlyingForex().getPaymentTime();
    final double domesticDf = smileData.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(payTime);
    final double foreignDf = smileData.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(payTime);
    final double spotRate = smileData.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
    // Covered interest parity forward.
    final double forwardRate = spotRate * foreignDf / domesticDf;
    final double vol = FXVolatilityUtils.getVolatility(smileData, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forwardRate);
    final double longShort = optionForex.isLong() ? 1.0 : -1.0;
    final double foreignNotional = Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount());
    final double vomma = domesticDf * BlackFormulaRepository.vomma(forwardRate, optionForex.getStrike(), optionForex.getTimeToExpiry(), vol)
        * longShort * foreignNotional;
    return CurrencyAmount.of(optionForex.getUnderlyingForex().getCurrency2(), vomma);
}
/**
 * Computes the Volga (aka Vomma) (2nd order sensitivity of the option present value to the implied vol)
 * @param optionForex The Forex option.
 * @param curves The yield curve bundle.
 * @return The Volga. In the same currency as present value.
 */
public CurrencyAmount volga(final ForexOptionVanilla optionForex, final YieldCurveBundle curves) {
// Volga and vomma are two names for the same quantity; delegate.
return vomma(optionForex, curves);
}
/**
 * Computes the forward exchange rate associated to the Forex option (1 Cyy1 = fwd Cyy2).
 * @param optionForex The Forex option.
 * @param curves The curve and fx data.
 * @return The forward rate.
 */
public double forwardForexRate(final ForexOptionVanilla optionForex, final YieldCurveBundle curves) {
  // Delegate to the shared discounting method on the underlying forex trade.
  return ForexDiscountingMethod.getInstance().forwardForexRate(optionForex.getUnderlyingForex(), curves);
}
/**
 * Computes the curve sensitivity of the option present value. The sensitivity of the volatility on the forward (and on the curves) is not taken into account. It is the curve
 * sensitivity in the Black model where the volatility is supposed to be constant for curve and forward changes.
 * Implemented by adjoint (algorithmic) differentiation: a forward sweep computes the price inputs, a backward sweep propagates the unit price bar back to the two rates.
 * @param optionForex The Forex option.
 * @param smile The smile data.
 * @return The curve sensitivity, in the domestic currency (currency 2 of the underlying Forex).
 */
public MultipleCurrencyInterestRateCurveSensitivity presentValueCurveSensitivity(final ForexOptionVanilla optionForex, final SmileDeltaTermStructureDataBundle smile) {
Validate.notNull(optionForex, "Forex option");
Validate.notNull(smile, "Smile");
Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
final double spot = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
final double payTime = optionForex.getUnderlyingForex().getPaymentTime();
final String domesticCurveName = optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName();
final String foreignCurveName = optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName();
// Forward sweep
final double dfDomestic = smile.getCurve(domesticCurveName).getDiscountFactor(payTime);
final double dfForeign = smile.getCurve(foreignCurveName).getDiscountFactor(payTime);
final double forward = spot * dfForeign / dfDomestic;
final double volatility = FXVolatilityUtils.getVolatility(smile, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forward);
// Numeraire of 1.0: the discounting is applied explicitly through dfDomestic in the backward sweep.
final BlackFunctionData dataBlack = new BlackFunctionData(forward, 1.0, volatility);
final double[] priceAdjoint = BLACK_FUNCTION.getPriceAdjoint(optionForex, dataBlack);
// Backward sweep: the *Bar variables are the partial derivatives of the price w.r.t. each intermediate quantity.
final double priceBar = 1.0;
final double forwardBar = priceAdjoint[1] * dfDomestic * priceBar;
final double dfForeignBar = spot / dfDomestic * forwardBar;
final double dfDomesticBar = -spot / (dfDomestic * dfDomestic) * dfForeign * forwardBar + priceAdjoint[0] * priceBar;
// d(df)/d(rate) = -t * df for a zero rate bump at the payment time.
final double rForeignBar = -payTime * dfForeign * dfForeignBar;
final double rDomesticBar = -payTime * dfDomestic * dfDomesticBar;
// Sensitivity object: scale by the absolute currency-1 notional and flip the sign for a short position.
final double factor = Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount()) * (optionForex.isLong() ? 1.0 : -1.0);
final List<DoublesPair> listForeign = new ArrayList<DoublesPair>();
listForeign.add(new DoublesPair(payTime, rForeignBar * factor));
final Map<String, List<DoublesPair>> resultForeignMap = new HashMap<String, List<DoublesPair>>();
resultForeignMap.put(foreignCurveName, listForeign);
InterestRateCurveSensitivity result = new InterestRateCurveSensitivity(resultForeignMap);
final List<DoublesPair> listDomestic = new ArrayList<DoublesPair>();
listDomestic.add(new DoublesPair(payTime, rDomesticBar * factor));
final Map<String, List<DoublesPair>> resultDomesticMap = new HashMap<String, List<DoublesPair>>();
resultDomesticMap.put(domesticCurveName, listDomestic);
result = result.plus(new InterestRateCurveSensitivity(resultDomesticMap));
return MultipleCurrencyInterestRateCurveSensitivity.of(optionForex.getUnderlyingForex().getCurrency2(), result);
}
/**
 * Present value curve sensitivity with a generic instrument as argument.
 * @param instrument A vanilla Forex option.
 * @param curves The volatility and curves description (SmileDeltaTermStructureDataBundle).
 * @return The curve sensitivity.
 */
@Override
public MultipleCurrencyInterestRateCurveSensitivity presentValueCurveSensitivity(final InstrumentDerivative instrument, final YieldCurveBundle curves) {
  Validate.isTrue(instrument instanceof ForexOptionVanilla, "Vanilla Forex option");
  Validate.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Smile delta data bundle required");
  // Narrow the generic arguments once, then delegate to the typed overload.
  final ForexOptionVanilla option = (ForexOptionVanilla) instrument;
  final SmileDeltaTermStructureDataBundle smileBundle = (SmileDeltaTermStructureDataBundle) curves;
  return presentValueCurveSensitivity(option, smileBundle);
}
/**
 * Computes the volatility sensitivity of the vanilla option with the Black function and a volatility from a volatility surface. The sensitivity
 * is computed with respect to the computed Black implied volatility and not with respect to the volatility surface input.
 * @param optionForex The Forex option.
 * @param smile The curve and smile data.
 * @return The volatility sensitivity. The sensitivity figures are, like the present value, in the domestic currency (currency 2).
 */
public PresentValueForexBlackVolatilitySensitivity presentValueBlackVolatilitySensitivity(final ForexOptionVanilla optionForex, final SmileDeltaTermStructureDataBundle smile) {
Validate.notNull(optionForex, "Forex option");
Validate.notNull(smile, "Smile");
Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
// Domestic discount factor and forward FX rate at the payment time.
final double df = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
final double spot = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
final double forward = spot * smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime())
/ df;
final double volatility = FXVolatilityUtils.getVolatility(smile, optionForex.getCurrency1(), optionForex.getCurrency2(), optionForex.getTimeToExpiry(), optionForex.getStrike(), forward);
final BlackFunctionData dataBlack = new BlackFunctionData(forward, df, volatility);
// priceAdjoint[2] is the Black vega; scale by the absolute currency-1 notional and the long/short sign.
final double[] priceAdjoint = BLACK_FUNCTION.getPriceAdjoint(optionForex, dataBlack);
final double volatilitySensitivityValue = priceAdjoint[2] * Math.abs(optionForex.getUnderlyingForex().getPaymentCurrency1().getAmount()) * (optionForex.isLong() ? 1.0 : -1.0);
final DoublesPair point = DoublesPair.of(optionForex.getTimeToExpiry(),
(optionForex.getCurrency1() == smile.getCurrencyPair().getFirst()) ? optionForex.getStrike() : 1.0 / optionForex.getStrike());
// Implementation note: The strike should be in the same currency order as the input data.
final SurfaceValue result = SurfaceValue.from(point, volatilitySensitivityValue);
final PresentValueForexBlackVolatilitySensitivity sensi = new PresentValueForexBlackVolatilitySensitivity(optionForex.getUnderlyingForex().getCurrency1(), optionForex.getUnderlyingForex()
.getCurrency2(), result);
return sensi;
}
/**
 * Computes the present value volatility sensitivity with a generic instrument as argument.
 * @param instrument A vanilla Forex option.
 * @param curves The volatility and curves description (SmileDeltaTermStructureDataBundle).
 * @return The volatility sensitivity. The sensitivity figures are, like the present value, in the domestic currency (currency 2).
 */
public PresentValueForexBlackVolatilitySensitivity presentValueBlackVolatilitySensitivity(final InstrumentDerivative instrument, final YieldCurveBundle curves) {
  Validate.isTrue(instrument instanceof ForexOptionVanilla, "Vanilla Forex option");
  Validate.isTrue(curves instanceof SmileDeltaTermStructureDataBundle, "Smile delta data bundle required");
  // Narrow the generic arguments once, then delegate to the typed overload.
  final ForexOptionVanilla option = (ForexOptionVanilla) instrument;
  final SmileDeltaTermStructureDataBundle smileBundle = (SmileDeltaTermStructureDataBundle) curves;
  return presentValueBlackVolatilitySensitivity(option, smileBundle);
}
/**
 * Computes the volatility sensitivity with respect to input data for a vanilla option with the Black function and a volatility from a volatility surface. The sensitivity
 * is computed with respect to each node in the volatility surface by distributing the point vega over the surface nodes with the bucketed weights.
 * @param optionForex The Forex option.
 * @param smile The curve and smile data.
 * @return The volatility node sensitivity. The sensitivity figures are, like the present value, in the domestic currency (currency 2).
 */
public PresentValueForexBlackVolatilityNodeSensitivityDataBundle presentValueBlackVolatilityNodeSensitivity(final ForexOptionVanilla optionForex, final SmileDeltaTermStructureDataBundle smile) {
Validate.notNull(optionForex, "Forex option");
Validate.notNull(smile, "Smile");
Validate.isTrue(smile.checkCurrencies(optionForex.getCurrency1(), optionForex.getCurrency2()), "Option currencies not compatible with smile data");
// Point sensitivity (vega at the option's expiry/strike), expressed in currency 2.
final PresentValueForexBlackVolatilitySensitivity pointSensitivity = presentValueBlackVolatilitySensitivity(optionForex, smile); // In ccy2
final SmileDeltaTermStructureParametersStrikeInterpolation volatilityModel = smile.getVolatilityModel();
final double df = smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency2().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime());
final double spot = smile.getFxRates().getFxRate(optionForex.getCurrency1(), optionForex.getCurrency2());
final double forward = spot * smile.getCurve(optionForex.getUnderlyingForex().getPaymentCurrency1().getFundingCurveName()).getDiscountFactor(optionForex.getUnderlyingForex().getPaymentTime())
/ df;
// nodeWeight[expiry][strike] gives each surface node's contribution to the interpolated volatility.
final VolatilityAndBucketedSensitivities volAndSensitivities = FXVolatilityUtils.getVolatilityAndSensitivities(smile, optionForex.getCurrency1(), optionForex.getCurrency2(),
optionForex.getTimeToExpiry(), optionForex.getStrike(), forward);
final double[][] nodeWeight = volAndSensitivities.getBucketedSensitivities();
// The strike key is inverted when the option's currency order differs from the surface's currency pair order.
final DoublesPair point = DoublesPair.of(optionForex.getTimeToExpiry(),
(optionForex.getCurrency1() == smile.getCurrencyPair().getFirst()) ? optionForex.getStrike() : 1.0 / optionForex.getStrike());
final double[][] vega = new double[volatilityModel.getNumberExpiration()][volatilityModel.getNumberStrike()];
for (int loopexp = 0; loopexp < volatilityModel.getNumberExpiration(); loopexp++) {
for (int loopstrike = 0; loopstrike < volatilityModel.getNumberStrike(); loopstrike++) {
vega[loopexp][loopstrike] = nodeWeight[loopexp][loopstrike] * pointSensitivity.getVega().getMap().get(point);
}
}
return new PresentValueForexBlackVolatilityNodeSensitivityDataBundle(optionForex.getUnderlyingForex().getCurrency1(), optionForex.getUnderlyingForex().getCurrency2(),
new DoubleMatrix1D(volatilityModel.getTimeToExpiration()), new DoubleMatrix1D(volatilityModel.getDeltaFull()), new DoubleMatrix2D(vega));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.ipojo.webconsole;
import java.util.ArrayList;
import java.util.List;
import org.apache.felix.ipojo.ComponentInstance;
import org.apache.felix.ipojo.Factory;
import org.apache.felix.ipojo.HandlerFactory;
import org.apache.felix.ipojo.architecture.Architecture;
import org.apache.felix.ipojo.handlers.providedservice.ProvidedService;
import org.apache.felix.ipojo.util.DependencyModel;
/**
 * Helper class dealing with instance and factory states.
 * @author <a href="mailto:dev@felix.apache.org">Felix Project Team</a>
 */
public class StateUtils {

    /**
     * Non-instantiable: this is a static utility class.
     */
    private StateUtils() {
    }

    /**
     * Counts the instances whose state equals the given state.
     * @param archs the instance architecture list (cannot be null, an empty list is returned)
     * @param state the {@link ComponentInstance} state constant to match
     * @return the number of instances in the given state
     */
    private static int countInstancesInState(List<Architecture> archs, int state) {
        int count = 0;
        for (Architecture a : archs) {
            if (a.getInstanceDescription().getState() == state) {
                count++;
            }
        }
        return count;
    }

    /**
     * Counts the factories whose state equals the given state.
     * Accepts {@code List<Factory>} as well as {@code List<HandlerFactory>}
     * (a handler factory is a factory).
     * @param factories the factory list (cannot be null, an empty list is returned)
     * @param state the {@link Factory} state constant to match
     * @return the number of factories in the given state
     */
    private static int countFactoriesInState(List<? extends Factory> factories, int state) {
        int count = 0;
        for (Factory f : factories) {
            if (f.getState() == state) {
                count++;
            }
        }
        return count;
    }

    /**
     * Gets the number of valid instances.
     * @param archs the instance architecture list
     * @return the number of valid instances.
     */
    public static int getValidInstancesCount(List<Architecture> archs) {
        return countInstancesInState(archs, ComponentInstance.VALID);
    }

    /**
     * Gets the number of invalid instances.
     * @param archs the instance architecture list
     * @return the number of invalid instances.
     */
    public static int getInvalidInstancesCount(List<Architecture> archs) {
        return countInstancesInState(archs, ComponentInstance.INVALID);
    }

    /**
     * Gets the number of valid factories.
     * @param factories the factory list
     * @return the number of valid factories.
     */
    public static int getValidFactoriesCount(List<Factory> factories) {
        return countFactoriesInState(factories, Factory.VALID);
    }

    /**
     * Gets the number of invalid factories.
     * @param factories the factory list
     * @return the number of invalid factories.
     */
    public static int getInvalidFactoriesCount(List<Factory> factories) {
        return countFactoriesInState(factories, Factory.INVALID);
    }

    /**
     * Gets the number of valid handlers.
     * @param handlers the handler factory list
     * @return the number of valid handlers.
     */
    public static int getValidHandlersCount(List<HandlerFactory> handlers) {
        return countFactoriesInState(handlers, Factory.VALID);
    }

    /**
     * Gets the number of invalid handlers.
     * @param handlers the handler factory list
     * @return the number of invalid handlers.
     */
    public static int getInvalidHandlersCount(List<HandlerFactory> handlers) {
        return countFactoriesInState(handlers, Factory.INVALID);
    }

    /**
     * Gets the instance state as a String.
     * @param state the state.
     * @return the String form of the state.
     */
    public static String getInstanceState(int state) {
        switch (state) {
            case ComponentInstance.VALID :
                return "valid";
            case ComponentInstance.INVALID :
                return "invalid";
            case ComponentInstance.DISPOSED :
                return "disposed";
            case ComponentInstance.STOPPED :
                return "stopped";
            default :
                return "unknown";
        }
    }

    /**
     * Gets the factory state as a String.
     * @param state the state.
     * @return the String form of the state.
     */
    public static String getFactoryState(int state) {
        switch (state) {
            case Factory.VALID :
                return "valid";
            case Factory.INVALID :
                return "invalid";
            default :
                return "unknown";
        }
    }

    /**
     * Gets the instance list created by the given factory.
     * Instances are matched by comparing the factory name with the name of
     * the component description of each instance.
     * @param archs the list of instance architectures
     * @param factory the factory name
     * @return the list containing the created instances (name)
     */
    public static List<String> getInstanceList(List<Architecture> archs, String factory) {
        List<String> list = new ArrayList<String>();
        for (Architecture arch : archs) { // Cannot be null, an empty list is returned.
            String n = arch.getInstanceDescription().getComponentDescription().getName();
            if (factory.equals(n)) {
                list.add(arch.getInstanceDescription().getName());
            }
        }
        return list;
    }

    /**
     * Gets the dependency state as a String.
     * @param state the state.
     * @return the String form of the state.
     */
    public static String getDependencyState(int state) {
        switch (state) {
            case DependencyModel.RESOLVED :
                return "resolved";
            case DependencyModel.UNRESOLVED :
                return "unresolved";
            case DependencyModel.BROKEN :
                return "broken";
            default :
                return "unknown (" + state + ")";
        }
    }

    /**
     * Gets the dependency binding policy as a String.
     * @param policy the policy.
     * @return the String form of the policy.
     */
    public static String getDependencyBindingPolicy(int policy) {
        switch (policy) {
            case DependencyModel.DYNAMIC_BINDING_POLICY :
                return "dynamic";
            case DependencyModel.DYNAMIC_PRIORITY_BINDING_POLICY :
                return "dynamic-priority";
            case DependencyModel.STATIC_BINDING_POLICY :
                return "static";
            default :
                return "unknown (" + policy + ")";
        }
    }

    /**
     * Gets the provided service state as a String.
     * @param state the state.
     * @return the String form of the state.
     */
    public static String getProvidedServiceState(int state) {
        switch (state) {
            case ProvidedService.REGISTERED :
                return "registered";
            case ProvidedService.UNREGISTERED :
                return "unregistered";
            default :
                return "unknown (" + state + ")";
        }
    }
}
| |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.service.protocol;
import java.util.*;
import net.java.sip.communicator.service.protocol.event.*;
import net.java.sip.communicator.util.*;
/**
 * Implements standard functionality of <tt>ProtocolProviderService</tt> in
 * order to make it easier for implementers to provide complete solutions while
 * focusing on protocol-specific details.
 *
 * @author Lyubomir Marinov
 */
public abstract class AbstractProtocolProviderService
    implements ProtocolProviderService
{
    /**
     * The <tt>Logger</tt> instances used by the
     * <tt>AbstractProtocolProviderService</tt> class and its instances for
     * logging output.
     */
    private static final Logger logger =
        Logger.getLogger(AbstractProtocolProviderService.class);

    /**
     * A list of all listeners registered for
     * <tt>RegistrationStateChangeEvent</tt>s. Access is synchronized on the
     * list itself.
     */
    private final List<RegistrationStateChangeListener> registrationListeners =
        new ArrayList<RegistrationStateChangeListener>();

    /**
     * The hashtable with the operation sets that we support locally, keyed by
     * the operation set class name.
     */
    private final Map<String, OperationSet> supportedOperationSets
        = new Hashtable<String, OperationSet>();

    /**
     * Registers the specified listener with this provider so that it would
     * receive notifications on changes of its state or other properties such
     * as its local address and display name.
     *
     * @param listener the listener to register.
     */
    public void addRegistrationStateChangeListener(
        RegistrationStateChangeListener listener)
    {
        if (listener == null)
        {
            throw new IllegalArgumentException("listener cannot be null");
        }
        synchronized(registrationListeners)
        {
            // Avoid duplicate registrations of the same listener.
            if (!registrationListeners.contains(listener))
                registrationListeners.add(listener);
        }
    }

    /**
     * Adds a specific <tt>OperationSet</tt> implementation to the set of
     * supported <tt>OperationSet</tt>s of this instance. Serves as a type-safe
     * wrapper around {@link #supportedOperationSets} which works with class
     * names instead of <tt>Class</tt> and also shortens the code which performs
     * such additions.
     *
     * @param <T> the exact type of the <tt>OperationSet</tt> implementation to
     * be added
     * @param opsetClass the <tt>Class</tt> of <tt>OperationSet</tt> under the
     * name of which the specified implementation is to be added
     * @param opset the <tt>OperationSet</tt> implementation to be added
     */
    protected <T extends OperationSet> void addSupportedOperationSet(
        Class<T> opsetClass,
        T opset)
    {
        supportedOperationSets.put(opsetClass.getName(), opset);
    }

    /**
     * Removes an <tt>OperationSet</tt> implementation from the set of
     * supported <tt>OperationSet</tt>s for this instance.
     *
     * @param <T> the exact type of the <tt>OperationSet</tt> implementation to
     * be removed
     * @param opsetClass the <tt>Class</tt> of <tt>OperationSet</tt> under the
     * name of which the implementation to remove was added
     */
    protected <T extends OperationSet> void removeSupportedOperationSet(
        Class<T> opsetClass)
    {
        supportedOperationSets.remove(opsetClass.getName());
    }

    /**
     * Removes all <tt>OperationSet</tt> implementation from the set of
     * supported <tt>OperationSet</tt>s for this instance.
     */
    protected void clearSupportedOperationSet()
    {
        supportedOperationSets.clear();
    }

    /**
     * Creates a RegistrationStateChange event corresponding to the specified
     * old and new states and notifies all currently registered listeners.
     *
     * @param oldState the state that the provider had before the change
     * occurred
     * @param newState the state that the provider is currently in.
     * @param reasonCode a value corresponding to one of the REASON_XXX fields
     * of the RegistrationStateChangeEvent class, indicating the reason for
     * this state transition.
     * @param reason a String further explaining the reason code or null if
     * no such explanation is necessary.
     */
    public void fireRegistrationStateChanged( RegistrationState oldState,
                                              RegistrationState newState,
                                              int reasonCode,
                                              String reason)
    {
        this.fireRegistrationStateChanged(
            oldState, newState, reasonCode, reason, false);
    }

    /**
     * Creates a RegistrationStateChange event corresponding to the specified
     * old and new states and notifies all currently registered listeners.
     *
     * @param oldState the state that the provider had before the change
     * occurred
     * @param newState the state that the provider is currently in.
     * @param reasonCode a value corresponding to one of the REASON_XXX fields
     * of the RegistrationStateChangeEvent class, indicating the reason for
     * this state transition.
     * @param reason a String further explaining the reason code or null if
     * no such explanation is necessary.
     * @param userRequest is the event by user request.
     */
    public void fireRegistrationStateChanged( RegistrationState oldState,
                                              RegistrationState newState,
                                              int reasonCode,
                                              String reason,
                                              boolean userRequest)
    {
        RegistrationStateChangeEvent event =
            new RegistrationStateChangeEvent(
                this, oldState, newState, reasonCode, reason);
        event.setUserRequest(userRequest);

        // Snapshot the listener list so that listeners can (de)register
        // while the event is being dispatched.
        RegistrationStateChangeListener[] listeners;
        synchronized (registrationListeners)
        {
            listeners
                = registrationListeners.toArray(
                    new RegistrationStateChangeListener[
                        registrationListeners.size()]);
        }

        if (logger.isDebugEnabled())
            logger.debug(
                "Dispatching " + event + " to " + listeners.length
                    + " listeners.");

        for (RegistrationStateChangeListener listener : listeners)
            try
            {
                listener.registrationStateChanged(event);
            }
            catch (Throwable throwable)
            {
                /*
                 * The registration state has already changed and we're not
                 * using the RegistrationStateChangeListeners to veto the change
                 * so it doesn't make sense to, for example, disconnect because
                 * one of them malfunctioned.
                 *
                 * Of course, death cannot be ignored.
                 */
                if (throwable instanceof ThreadDeath)
                    throw (ThreadDeath) throwable;
                logger.error(
                    "An error occurred while executing "
                        + "RegistrationStateChangeListener"
                        + "#registrationStateChanged"
                        + "(RegistrationStateChangeEvent) of "
                        + listener,
                    throwable);
            }

        if (logger.isTraceEnabled())
            logger.trace("Done.");
    }

    /**
     * Returns the operation set corresponding to the specified class or null if
     * this operation set is not supported by the provider implementation.
     *
     * @param <T> the exact type of the <tt>OperationSet</tt> that we're looking
     * for
     * @param opsetClass the <tt>Class</tt> of the operation set that we're
     * looking for.
     * @return returns an <tt>OperationSet</tt> of the specified <tt>Class</tt>
     * if the underlying implementation supports it; <tt>null</tt>, otherwise.
     */
    @SuppressWarnings("unchecked")
    public <T extends OperationSet> T getOperationSet(Class<T> opsetClass)
    {
        return (T) supportedOperationSets.get(opsetClass.getName());
    }

    /**
     * Returns the protocol display name. This is the name that would be used
     * by the GUI to display the protocol name.
     *
     * @return a String containing the display name of the protocol this service
     * is implementing
     */
    public String getProtocolDisplayName()
    {
        String displayName
            = getAccountID().getAccountPropertyString(
                ProtocolProviderFactory.PROTOCOL);
        return (displayName == null) ? getProtocolName() : displayName;
    }

    /**
     * Returns an array containing all operation sets supported by the current
     * implementation. When querying this method users must be prepared to
     * receive any subset of the OperationSet-s defined by this service. They
     * MUST ignore any OperationSet-s that they are not aware of and that may be
     * defined by future version of this service. Such "unknown" OperationSet-s
     * though not encouraged, may also be defined by service implementors.
     *
     * @return a java.util.Map containing instance of all supported operation
     * sets mapped against their class names (e.g.
     * OperationSetPresence.class.getName()) .
     */
    public Map<String, OperationSet> getSupportedOperationSets()
    {
        // Defensive copy so that callers cannot mutate our internal map.
        return new Hashtable<String, OperationSet>(supportedOperationSets);
    }

    /**
     * Returns a collection containing all operation sets classes supported by
     * the current implementation. When querying this method users must be
     * prepared to receive any subset of the OperationSet-s defined by this
     * service. They MUST ignore any OperationSet-s that they are not aware of
     * and that may be defined by future versions of this service. Such
     * "unknown" OperationSet-s though not encouraged, may also be defined by
     * service implementors.
     *
     * @return a {@link Collection} containing instances of all supported
     * operation set classes (e.g. <tt>OperationSetPresence.class</tt>.
     */
    @SuppressWarnings("unchecked")
    public Collection<Class<? extends OperationSet>>
        getSupportedOperationSetClasses()
    {
        Collection<Class<? extends OperationSet>> opSetClasses
            = new ArrayList<Class<? extends OperationSet>>();

        // Snapshot the supported operation sets once: the getter builds a
        // fresh defensive copy, so calling it inside the loop would create a
        // new Hashtable per iteration.
        Map<String, OperationSet> supported = getSupportedOperationSets();

        for (Map.Entry<String, OperationSet> entry : supported.entrySet())
        {
            String opSetClassName = entry.getKey();
            try
            {
                opSetClasses.add(
                    (Class<? extends OperationSet>)
                        entry.getValue().getClass().getClassLoader()
                            .loadClass(opSetClassName));
            }
            catch (ClassNotFoundException e)
            {
                // Log and skip the offending class instead of printing the
                // stack trace to stderr.
                logger.error(
                    "Failed to load operation set class " + opSetClassName,
                    e);
            }
        }
        return opSetClasses;
    }

    /**
     * Indicates whether or not this provider is registered
     *
     * @return <tt>true</tt> if the provider is currently registered and
     * <tt>false</tt> otherwise.
     */
    public boolean isRegistered()
    {
        return getRegistrationState().equals(RegistrationState.REGISTERED);
    }

    /**
     * Removes the specified registration state change listener so that it does
     * not receive any further notifications upon changes of the
     * RegistrationState of this provider.
     *
     * @param listener the listener to register for
     * <tt>RegistrationStateChangeEvent</tt>s.
     */
    public void removeRegistrationStateChangeListener(
        RegistrationStateChangeListener listener)
    {
        synchronized(registrationListeners)
        {
            registrationListeners.remove(listener);
        }
    }

    /**
     * Clear all registration state change listeners.
     */
    public void clearRegistrationStateChangeListener()
    {
        synchronized(registrationListeners)
        {
            registrationListeners.clear();
        }
    }

    /**
     * A clear display for ProtocolProvider when its printed in logs.
     * @return the class name and the currently handled account.
     */
    @Override
    public String toString()
    {
        return getClass().getSimpleName() + "("
            + getAccountID().getDisplayName() + ")";
    }

    /**
     * Ends the registration of this protocol provider with the current
     * registration service. The default is just to call unregister. Providers
     * that need to differentiate user requests (from the UI) or automatic
     * unregister can override this method.
     * @param userRequest is the unregister by user request.
     * @throws OperationFailedException with the corresponding code it the
     * registration fails for some reason (e.g. a networking error or an
     * implementation problem).
     */
    public void unregister(boolean userRequest)
        throws OperationFailedException
    {
        this.unregister();
    }
}
| |
/**
* Copyright (C) 2006 Dragos Balan (dragos.balan@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.reportengine.core.steps;
import java.util.ArrayList;
import java.util.List;
import net.sf.reportengine.components.CellProps;
import net.sf.reportengine.components.RowProps;
import net.sf.reportengine.config.DataColumn;
import net.sf.reportengine.config.GroupColumn;
import net.sf.reportengine.config.HorizAlign;
import net.sf.reportengine.config.VertAlign;
import net.sf.reportengine.core.algorithm.NewRowEvent;
import net.sf.reportengine.core.calc.CalcIntermResult;
import net.sf.reportengine.util.StepIOKeys;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FlatTableTotalsOutputStep extends AbstractOutputStep<String, Integer, Integer> {
/**
 * the one and only logger
 */
private static final Logger LOGGER = LoggerFactory.getLogger(FlatTableTotalsOutputStep.class);
/**
 * "Grand " label. To be used in front of Total, Average, Count etc.
 */
public static final String GRAND_TOTAL_STRING = "Grand ";
/**
 * local copy of the group columns
 */
private List<GroupColumn> groupCols;
/**
 * local copy of the data columns
 */
private List<DataColumn> dataCols;
/**
 * distribution of the calculators across the data columns, as returned by
 * getCalculatorDistributionInColumnDataArray in {@code init}.
 */
private ArrayList<Integer> distribOfCalculatorsInDataColsArray;
/**
 * the label of the calculators with whitespaces between
 */
private String calcLabels;
/**
 * Initialisation step: caches the column configuration and the calculator
 * metadata that the later total-output calls rely on.
 */
public StepResult<String> init(StepInput stepInput) {
    calcLabels = getLabelsForAllCalculators(stepInput);
    dataCols = getDataColumns(stepInput);
    groupCols = getGroupColumns(stepInput);
    distribOfCalculatorsInDataColsArray = getCalculatorDistributionInColumnDataArray(stepInput);
    final boolean distributionArrayMissing = distribOfCalculatorsInDataColsArray == null;
    LOGGER.trace("The FlatReportTotalsOutputStep has been initialized. The distribution array is null {}",
                 distributionArrayMissing);
    return StepResult.NO_RESULT;
}
/**
 * Per-row step: when the current row closes one or more groups (grouping
 * level >= 0) and totals are enabled, outputs the buffered total rows for
 * levels 0 up to the current grouping level and advances the data row count
 * accordingly.
 *
 * @param rowEvent the new row event (unused here; totals are driven by the
 *            grouping level computed from the step input)
 * @param stepInput the current step input
 * @return the updated data row count, keyed under DATA_ROW_COUNT
 */
public StepResult<Integer> execute(NewRowEvent rowEvent, StepInput stepInput) {
    int currentDataRowNumber = getDataRowCount(stepInput).intValue();
    int groupLevel = getGroupingLevel(stepInput);
    // when non simple data row
    if (groupLevel >= 0 && getShowTotals(stepInput)) {
        int totalRowEnd = computeCalcRowNumberForAggLevel(stepInput, groupLevel);
        // TODO: this operation is the opposite of
        // computeAggLevelForCalcRowNumber which is
        // called inside outputTotalRowsFromTo. One of them should be
        // deleted.
        // output totals from level 0 to current grouping level
        outputTotalRowsFromTo(stepInput, 0, totalRowEnd, currentDataRowNumber);
        // the count from 0 to totalRowEnd is totalRowEnd+1
        currentDataRowNumber = currentDataRowNumber + totalRowEnd + 1;
    } else {
        // Bug fix: this trace used to sit inside the branch above, i.e. it
        // claimed "not displaying totals" exactly when totals WERE displayed.
        LOGGER.trace("not displaying totals because current level is {}", groupLevel);
    }
    return new StepResult<Integer>(StepIOKeys.DATA_ROW_COUNT,
                                   Integer.valueOf(currentDataRowNumber));
}
/**
 * exit displays the last totals in the calculator matrix buffer and the
 * grand total
 */
public StepResult<Integer> exit(StepInput stepInput) {
int localDataRowNbr = getDataRowCount(stepInput).intValue();
CalcIntermResult[][] calcIntermResults = getCalcIntermResultsMatrix(stepInput);
if (groupCols != null && getShowTotals(stepInput)) {
// calculators.length-2 because for levelCalculators.length-1 is a
// separate call
// (a call for Grand total see below)
outputTotalRowsFromTo(stepInput, 0, calcIntermResults.length - 2, localDataRowNbr);
localDataRowNbr = localDataRowNbr + calcIntermResults.length - 1;
// length-1 because we output from 0 to length-2 (included)
}
// now the grand total
if (getShowGrandTotal(stepInput)) {
// outputTotalsRow("Grand Total",
// calculators[calculators.length-1]);
// The last matrix row holds the grand-total calculators.
outputTotalsRow(stepInput,
GRAND_TOTAL_GROUPING_LEVEL,
calcIntermResults[calcIntermResults.length - 1],
localDataRowNbr);
localDataRowNbr = localDataRowNbr + 1;
}
// Report the final data row count so downstream steps stay in sync.
return new StepResult<Integer>(StepIOKeys.DATA_ROW_COUNT, Integer.valueOf(localDataRowNbr));
}
/**
* outputs the totals existing on the specified rows starting with rowStart
* and ending with rowEnnd (inclusive)
*
* @param rowStart
* the first row to start outputting
* @param rowEnd
* the last row to output
*/
private void outputTotalRowsFromTo(StepInput stepInput,
int rowStart,
int rowEnd,
int dataRowNumber) {
LOGGER.trace("output totals from {} to {}", rowStart, rowEnd);
CalcIntermResult[][] calcResults = getCalcIntermResultsMatrix(stepInput);
for (int row = rowStart; row <= rowEnd; row++) {
// based on the row we can compute the aggregation level so that we
// can determine the
// column to use from the previous data row
int aggLevel = computeAggLevelForCalcRowNumber(stepInput, row);
outputTotalsRow(stepInput, aggLevel, calcResults[row], dataRowNumber);
dataRowNumber++;
}
}
/**
*
* @param groupLevel
* @param calcResultForCurrentGrpLevel
*/
private void outputTotalsRow(StepInput stepInput,
int groupLevel,
CalcIntermResult[] calcResultForCurrentGrpLevel,
int dataRowNumber) {
// ReportOutput output = getReportOutput(stepInput);
// output.startDataRow(new RowProps(dataRowNumber));
outputOneValue(stepInput,
DataRowsOutputStep.START_DATA_ROW_TEMPLATE,
new RowProps(dataRowNumber));
if (groupCols != null && groupCols.size() > 0) {
// prepare and output the Total column
String totalString = getTotalStringForGroupingLevel(stepInput, calcLabels, groupLevel);
CellProps cellProps =
new CellProps.Builder(totalString).horizAlign(HorizAlign.LEFT)
.vertAlign(VertAlign.MIDDLE)
.rowNumber(dataRowNumber)
.build();
// output.outputDataCell(cellProps);
outputOneValue(stepInput, DataRowsOutputStep.DATA_CELL_TEMPLATE, cellProps);
if (groupCols.size() > 1) {
// for all other grouping columns put white spaces
// (groupColumns.length-1 colspan because the first column was
// already filled with the word "Total xxxx"
// if you want a single cell spanning multiple rows un-comment
// below
// output.output(new CellProps.Builder(ReportOutput.WHITESPACE)
// .colspan(groupCols.size()-1)
// .build());
// this is to display an empty cell for every remaining group
// column
for (int i = 1; i < groupCols.size(); i++) {
CellProps whitespaceCellProps =
new CellProps.Builder(CellProps.WHITESPACE).rowNumber(dataRowNumber)
.build();
// output.outputDataCell(whitespaceCellProps);
outputOneValue(stepInput,
DataRowsOutputStep.DATA_CELL_TEMPLATE,
whitespaceCellProps);
}
}
}
String formattedResult = null;
// then iterate over data columns to display the totals for those having
// calculators
for (int i = 0; i < dataCols.size(); i++) {
DataColumn column = dataCols.get(i);
if (column.getCalculator() != null) {
int calculatorIndex = distribOfCalculatorsInDataColsArray.get(i).intValue();
Object calculatorResult = calcResultForCurrentGrpLevel[calculatorIndex].getResult();
// format the computed value
formattedResult = dataCols.get(i).getFormattedTotal(calculatorResult);
CellProps dataCellProps =
new CellProps.Builder(formattedResult).horizAlign(dataCols.get(i)
.getHorizAlign())
.vertAlign(dataCols.get(i).getVertAlign())
.rowNumber(dataRowNumber)
.build();
// output.outputDataCell(dataCellProps);
outputOneValue(stepInput, DataRowsOutputStep.DATA_CELL_TEMPLATE, dataCellProps);
} else {
// if the column doesn't have a calculator associated
// then display an empty value (whitespace) with col span 1
CellProps whitespaceCellProps =
new CellProps.Builder(CellProps.WHITESPACE).rowNumber(dataRowNumber).build();
// output.outputDataCell(whitespaceCellProps);
outputOneValue(stepInput,
DataRowsOutputStep.DATA_CELL_TEMPLATE,
whitespaceCellProps);
}
}
// output.endDataRow();
outputNoValue(stepInput, DataRowsOutputStep.END_DATA_ROW_TEMPLATE);
}
/**
*
* @param stepInput
* @return
*/
protected String getLabelsForAllCalculators(StepInput stepInput) {
StringBuilder result = new StringBuilder();
for (DataColumn dataColumn : dataCols) {
if (dataColumn.getCalculator() != null) {
result.append(dataColumn.getCalculator().getLabel()).append(" ");
}
}
return result.toString();
}
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.client.impl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.kie.internal.process.CorrelationKey;
import org.kie.server.api.commands.CommandScript;
import org.kie.server.api.commands.DescriptorCommand;
import org.kie.server.api.model.KieServerCommand;
import org.kie.server.api.model.ServiceResponse;
import org.kie.server.api.model.Wrapped;
import org.kie.server.api.model.definition.AssociatedEntitiesDefinition;
import org.kie.server.api.model.definition.ProcessDefinition;
import org.kie.server.api.model.definition.ProcessStartSpec;
import org.kie.server.api.model.definition.ServiceTasksDefinition;
import org.kie.server.api.model.definition.SubProcessesDefinition;
import org.kie.server.api.model.definition.TaskInputsDefinition;
import org.kie.server.api.model.definition.TaskOutputsDefinition;
import org.kie.server.api.model.definition.UserTaskDefinitionList;
import org.kie.server.api.model.definition.VariablesDefinition;
import org.kie.server.api.model.instance.NodeInstance;
import org.kie.server.api.model.instance.NodeInstanceList;
import org.kie.server.api.model.instance.ProcessInstance;
import org.kie.server.api.model.instance.ProcessInstanceList;
import org.kie.server.api.model.instance.VariableInstance;
import org.kie.server.api.model.instance.VariableInstanceList;
import org.kie.server.api.model.instance.WorkItemInstance;
import org.kie.server.api.model.instance.WorkItemInstanceList;
import org.kie.server.api.rest.RestURI;
import org.kie.server.client.KieServicesConfiguration;
import org.kie.server.client.ProcessServicesClient;
import static java.util.Collections.singletonList;
import static org.kie.server.api.rest.RestURI.ABORT_PROCESS_INSTANCES_DEL_URI;
import static org.kie.server.api.rest.RestURI.ABORT_PROCESS_INST_DEL_URI;
import static org.kie.server.api.rest.RestURI.CONTAINER_ID;
import static org.kie.server.api.rest.RestURI.CORRELATION_KEY;
import static org.kie.server.api.rest.RestURI.PROCESS_DEF_ASSOCIATED_ENTITIES_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_DEF_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_DEF_SERVICE_TASKS_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_DEF_SUBPROCESS_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_DEF_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_DEF_USER_TASKS_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_DEF_USER_TASK_INPUT_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_DEF_USER_TASK_OUTPUT_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_DEF_VARIABLES_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_ID;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCES_BY_CONTAINER_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCES_BY_PARENT_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCES_NODE_INSTANCES_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_SIGNALS_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_VARS_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_VARS_POST_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_VAR_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_VAR_INSTANCES_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_VAR_INSTANCE_BY_VAR_NAME_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_VAR_PUT_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_WORK_ITEMS_BY_PROC_INST_ID_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_WORK_ITEM_ABORT_PUT_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_WORK_ITEM_BY_ID_GET_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INSTANCE_WORK_ITEM_COMPLETE_PUT_URI;
import static org.kie.server.api.rest.RestURI.PROCESS_INST_ID;
import static org.kie.server.api.rest.RestURI.PROCESS_URI;
import static org.kie.server.api.rest.RestURI.SIGNAL_NAME;
import static org.kie.server.api.rest.RestURI.SIGNAL_PROCESS_BY_CORRELATION_KEY_POST_URI;
import static org.kie.server.api.rest.RestURI.SIGNAL_PROCESS_INSTANCES_PORT_URI;
import static org.kie.server.api.rest.RestURI.SIGNAL_PROCESS_INST_POST_URI;
import static org.kie.server.api.rest.RestURI.START_PROCESS_FROM_NODES_POST_URI;
import static org.kie.server.api.rest.RestURI.START_PROCESS_FROM_NODES_WITH_CORRELATION_KEY_POST_URI;
import static org.kie.server.api.rest.RestURI.START_PROCESS_POST_URI;
import static org.kie.server.api.rest.RestURI.START_PROCESS_WITH_CORRELATION_KEY_POST_URI;
import static org.kie.server.api.rest.RestURI.COMPUTE_PROCESS_OUTCOME_POST_URI;
import static org.kie.server.api.rest.RestURI.TASK_NAME;
import static org.kie.server.api.rest.RestURI.VAR_NAME;
import static org.kie.server.api.rest.RestURI.WORK_ITEM_ID;
import static org.kie.server.api.rest.RestURI.build;
public class ProcessServicesClientImpl extends AbstractKieServicesClientImpl implements ProcessServicesClient {
    /**
     * Creates a process services client using the given configuration and the
     * default class loader for marshalling.
     *
     * @param config client configuration (transport, endpoints, credentials)
     */
    public ProcessServicesClientImpl(KieServicesConfiguration config) {
        super(config);
    }
    /**
     * Creates a process services client with an explicit class loader, used to
     * resolve custom model classes during (un)marshalling.
     *
     * @param config      client configuration (transport, endpoints, credentials)
     * @param classLoader loader for user-defined classes referenced in payloads
     */
    public ProcessServicesClientImpl(KieServicesConfiguration config, ClassLoader classLoader) {
        super(config, classLoader);
    }
@Override
public ProcessDefinition getProcessDefinition(String containerId, String processId) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_DEF_URI + "/" + PROCESS_DEF_GET_URI, valuesMap),
ProcessDefinition.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("DefinitionService", "getProcessDefinition", new Object[]{containerId, processId})));
ServiceResponse<ProcessDefinition> response = (ServiceResponse<ProcessDefinition>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
return response.getResult();
}
}
@Override
public SubProcessesDefinition getReusableSubProcessDefinitions(String containerId, String processId) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_DEF_URI + "/" + PROCESS_DEF_SUBPROCESS_GET_URI, valuesMap),
SubProcessesDefinition.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("DefinitionService", "getReusableSubProcesses", new Object[]{containerId, processId})));
ServiceResponse<SubProcessesDefinition> response = (ServiceResponse<SubProcessesDefinition>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
return response.getResult();
}
}
@Override
public VariablesDefinition getProcessVariableDefinitions(String containerId, String processId) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_DEF_URI + "/" + PROCESS_DEF_VARIABLES_GET_URI, valuesMap),
VariablesDefinition.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("DefinitionService", "getProcessVariables", new Object[]{containerId, processId})));
ServiceResponse<VariablesDefinition> response = (ServiceResponse<VariablesDefinition>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
return response.getResult();
}
}
@Override
public ServiceTasksDefinition getServiceTaskDefinitions(String containerId, String processId) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_DEF_URI + "/" + PROCESS_DEF_SERVICE_TASKS_GET_URI, valuesMap),
ServiceTasksDefinition.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("DefinitionService", "getServiceTasks", new Object[]{containerId, processId})));
ServiceResponse<ServiceTasksDefinition> response = (ServiceResponse<ServiceTasksDefinition>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
return response.getResult();
}
}
@Override
public AssociatedEntitiesDefinition getAssociatedEntityDefinitions(String containerId, String processId) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_DEF_URI + "/" + PROCESS_DEF_ASSOCIATED_ENTITIES_GET_URI, valuesMap),
AssociatedEntitiesDefinition.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("DefinitionService", "getAssociatedEntities", new Object[]{containerId, processId})));
ServiceResponse<AssociatedEntitiesDefinition> response = (ServiceResponse<AssociatedEntitiesDefinition>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
return response.getResult();
}
}
@Override
public UserTaskDefinitionList getUserTaskDefinitions(String containerId, String processId) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_DEF_URI + "/" + PROCESS_DEF_USER_TASKS_GET_URI, valuesMap),
UserTaskDefinitionList.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("DefinitionService", "getTasksDefinitions", new Object[]{containerId, processId})));
ServiceResponse<UserTaskDefinitionList> response = (ServiceResponse<UserTaskDefinitionList>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
return response.getResult();
}
}
@Override
public TaskInputsDefinition getUserTaskInputDefinitions(String containerId, String processId, String taskName) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
valuesMap.put(TASK_NAME, encode(taskName));
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_DEF_URI + "/" + PROCESS_DEF_USER_TASK_INPUT_GET_URI, valuesMap),
TaskInputsDefinition.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("DefinitionService", "getTaskInputMappings", new Object[]{containerId, processId, taskName})));
ServiceResponse<TaskInputsDefinition> response = (ServiceResponse<TaskInputsDefinition>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
return response.getResult();
}
}
@Override
public TaskOutputsDefinition getUserTaskOutputDefinitions(String containerId, String processId, String taskName) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
valuesMap.put(TASK_NAME, encode(taskName));
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_DEF_URI + "/" + PROCESS_DEF_USER_TASK_OUTPUT_GET_URI, valuesMap),
TaskOutputsDefinition.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("DefinitionService", "getTaskOutputMappings", new Object[]{containerId, processId, taskName})));
ServiceResponse<TaskOutputsDefinition> response = (ServiceResponse<TaskOutputsDefinition>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
return response.getResult();
}
}
    /**
     * Starts a process instance with no input variables.
     * Delegates to {@link #startProcess(String, String, Map)} with an empty,
     * mutable map (downstream code may add to it).
     *
     * @param containerId container holding the process definition
     * @param processId   process definition identifier
     * @return the id of the newly created process instance
     */
    @Override
    public Long startProcess(String containerId, String processId) {
        return startProcess(containerId, processId, new HashMap<String, Object>());
    }
@Override
public Long startProcess(String containerId, String processId, Map<String, Object> variables) {
Object result = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
result = makeHttpPostRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + START_PROCESS_POST_URI, valuesMap), variables,
Object.class);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "startProcess", serialize(safeMap(variables)), marshaller.getFormat().getType(), new Object[]{containerId, processId}) ) );
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = deserialize(response.getResult(), Object.class);
}
if (result instanceof Wrapped) {
return (Long) ((Wrapped) result).unwrap();
}
return ((Number) result).longValue();
}
@Override
public Map<String, Object> computeProcessOutcome(String containerId, String processId, Map<String, Object> variables) {
Object result = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
result = makeHttpPostRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + COMPUTE_PROCESS_OUTCOME_POST_URI, valuesMap), variables,
Object.class);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "computeProcessOutcome", serialize(safeMap(variables)), marshaller.getFormat().getType(), new Object[]{containerId, processId}) ) );
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = deserialize(response.getResult(), Map.class);
}
return (Map<String, Object>) result;
}
@Override
public Long startProcessFromNodeIds(String containerId, String processId, Map<String, Object> variables, String... nodes) {
Object result = null;
ProcessStartSpec spec = new ProcessStartSpec();
spec.setVariables(variables);
spec.setNodeIds(Arrays.asList(nodes));
if (config.isRest()) {
Map<String, Object> valuesMap = new HashMap<>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
result = makeHttpPostRequestAndCreateCustomResponse(build(loadBalancer.getUrl(), PROCESS_URI + "/" + START_PROCESS_FROM_NODES_POST_URI, valuesMap), spec, Object.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("ProcessService", "startProcessFromNodeIds",
serialize(spec), marshaller.getFormat().getType(),
containerId, processId)));
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM", containerId).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = deserialize(response.getResult(), Object.class);
}
if (result instanceof Wrapped) {
return (Long) ((Wrapped) result).unwrap();
}
return ((Number) result).longValue();
}
@Override
public Long startProcessFromNodeIds(String containerId, String processId, CorrelationKey correlationKey, Map<String, Object> variables, String... nodes) {
Object result = null;
ProcessStartSpec spec = new ProcessStartSpec();
spec.setVariables(variables);
spec.setNodeIds(Arrays.asList(nodes));
if (config.isRest()) {
Map<String, Object> valuesMap = new HashMap<>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
valuesMap.put(CORRELATION_KEY, correlationKey.toExternalForm());
result = makeHttpPostRequestAndCreateCustomResponse(build(loadBalancer.getUrl(),
PROCESS_URI + "/" + START_PROCESS_FROM_NODES_WITH_CORRELATION_KEY_POST_URI, valuesMap),
spec, Object.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("ProcessService", "startProcessWithCorrelationKeyFromNodeIds",
serialize(spec), marshaller.getFormat().getType(),
containerId, processId, correlationKey.toExternalForm())));
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM", containerId).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = deserialize(response.getResult(), Object.class);
}
if (result instanceof Wrapped) {
return (Long) ((Wrapped) result).unwrap();
}
return ((Number) result).longValue();
}
    /**
     * Starts a process instance tagged with a correlation key and no input
     * variables. Delegates with a null variables map (the delegate's JMS path
     * normalizes it via safeMap; REST posts the null body as-is).
     *
     * @param containerId    container holding the process definition
     * @param processId      process definition identifier
     * @param correlationKey business key attached to the new instance
     * @return the id of the newly created process instance
     */
    @Override
    public Long startProcess(String containerId, String processId, CorrelationKey correlationKey) {
        return startProcess(containerId, processId, correlationKey, null);
    }
@Override
public Long startProcess(String containerId, String processId, CorrelationKey correlationKey, Map<String, Object> variables) {
Object result = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_ID, processId);
valuesMap.put(CORRELATION_KEY, correlationKey.toExternalForm());
result = makeHttpPostRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + START_PROCESS_WITH_CORRELATION_KEY_POST_URI, valuesMap), variables,
Object.class);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "startProcessWithCorrelation", serialize(safeMap(variables)), marshaller.getFormat().getType(), new Object[]{containerId, processId, correlationKey.toExternalForm()}) ) );
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = deserialize(response.getResult(), Object.class);
}
if (result instanceof Wrapped) {
return (Long) ((Wrapped) result).unwrap();
}
return ((Number) result).longValue();
}
@Override
public void abortProcessInstance(String containerId, Long processInstanceId) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
makeHttpDeleteRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + ABORT_PROCESS_INST_DEL_URI, valuesMap),
null);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "abortProcessInstance", new Object[]{containerId, processInstanceId})));
ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
}
}
@Override
public void abortProcessInstances(String containerId, List<Long> processInstanceIds) {
if( config.isRest() ) {
String queryStr = buildQueryString("instanceId", processInstanceIds);
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
makeHttpDeleteRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + ABORT_PROCESS_INSTANCES_DEL_URI, valuesMap) + queryStr,
null);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "abortProcessInstances", new Object[]{containerId, processInstanceIds})));
ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
}
}
    /**
     * Fetches a single process variable as a plain Object.
     * Delegates to {@link #getProcessInstanceVariable(String, Long, String, Class)}
     * with {@code Object.class}.
     *
     * @param containerId       container holding the instance
     * @param processInstanceId id of the process instance
     * @param variableName      name of the variable to read
     * @return the variable value, untyped
     */
    @Override
    public Object getProcessInstanceVariable(String containerId, Long processInstanceId, String variableName) {
        return getProcessInstanceVariable(containerId, processInstanceId, variableName, Object.class);
    }
@Override
public <T> T getProcessInstanceVariable(String containerId, Long processInstanceId, String variableName, Class<T> type) {
Object result = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
valuesMap.put(VAR_NAME, variableName);
result = makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_VAR_GET_URI, valuesMap), type);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "getProcessInstanceVariable", marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId, variableName}) ) );
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = deserialize(response.getResult(), type);
}
if (result instanceof Wrapped) {
return (T) ((Wrapped) result).unwrap();
}
return (T) result;
}
@Override
public Map<String, Object> getProcessInstanceVariables(String containerId, Long processInstanceId) {
Object variables = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
variables = makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_VARS_GET_URI, valuesMap),
Object.class);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "getProcessInstanceVariables", marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId}) ) );
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
variables = deserialize(response.getResult(), Object.class);
}
if (variables instanceof Wrapped) {
return (Map) ((Wrapped) variables).unwrap();
}
return (Map) variables;
}
@Override
public void signalProcessInstance(String containerId, Long processInstanceId, String signalName, Object event) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
valuesMap.put(SIGNAL_NAME, signalName);
Map<String, String> headers = new HashMap<String, String>();
makeHttpPostRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + SIGNAL_PROCESS_INST_POST_URI, valuesMap), event, String.class, headers);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "signalProcessInstance", serialize(event), marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId, signalName})));
ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
}
}
@Override
public void signalProcessInstances(String containerId, List<Long> processInstanceIds, String signalName, Object event) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(SIGNAL_NAME, signalName);
String queryStr = buildQueryString("instanceId", processInstanceIds);
Map<String, String> headers = new HashMap<String, String>();
makeHttpPostRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + SIGNAL_PROCESS_INSTANCES_PORT_URI, valuesMap) + queryStr
, event, String.class, headers);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "signalProcessInstances", serialize(event), marshaller.getFormat().getType(), new Object[]{containerId, processInstanceIds, signalName})));
ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
}
}
@Override
public void signalProcessInstanceByCorrelationKey(String containerId,
CorrelationKey correlationKey,
String signalName,
Object event) {
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(CORRELATION_KEY, correlationKey.toExternalForm());
valuesMap.put(SIGNAL_NAME, signalName);
Map<String, String> headers = new HashMap<String, String>();
makeHttpPostRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + SIGNAL_PROCESS_BY_CORRELATION_KEY_POST_URI, valuesMap), event, String.class, headers);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "signalProcessInstanceByCorrelationKey", serialize(event), marshaller.getFormat().getType(), new Object[]{containerId, correlationKey.toExternalForm(), signalName})));
ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
}
}
@Override
public void signalProcessInstancesByCorrelationKeys(String containerId,
List<CorrelationKey> correlationKeys,
String signalName,
Object event) {
// Bulk signal: resolve each correlation key to its external String form once up front,
// since both transports send plain strings.
List<String> keys = correlationKeys.stream().map(CorrelationKey::toExternalForm).collect(Collectors.toList());
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(SIGNAL_NAME, signalName);
// Keys travel as "correlationKeys" query parameters, not in the templated URL path.
String queryStr = buildQueryString("correlationKeys", keys);
Map<String, String> headers = new HashMap<String, String>();
// NOTE(review): this reuses SIGNAL_PROCESS_INSTANCES_PORT_URI (the instance-id endpoint)
// with a correlationKeys query string — confirm the server route accepts keys here.
makeHttpPostRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + SIGNAL_PROCESS_INSTANCES_PORT_URI, valuesMap) + queryStr
, event, String.class, headers);
} else {
// JMS: the event is pre-serialized into the command payload; keys/signal are arguments.
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "signalProcessInstancesByCorrelationKey", serialize(event), marshaller.getFormat().getType(), new Object[]{containerId, keys, signalName})));
ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
// Translate an error-level server response into a client-side exception.
throwExceptionOnFailure(response);
}
}
@Override
public void signal(String containerId, String signalName, Object event) {
    // Broadcast a named signal (with optional payload) at container scope.
    if (config.isRest()) {
        // REST transport: POST the event to the container-level signal endpoint.
        Map<String, Object> urlValues = new HashMap<String, Object>();
        urlValues.put(CONTAINER_ID, containerId);
        urlValues.put(SIGNAL_NAME, signalName);
        String url = build(loadBalancer.getUrl(), PROCESS_URI + "/" + SIGNAL_PROCESS_INSTANCES_PORT_URI, urlValues);
        makeHttpPostRequestAndCreateCustomResponse(url, event, String.class, new HashMap<String, String>());
    } else {
        // JMS transport: serialize the event into the command and check the result.
        DescriptorCommand command = new DescriptorCommand("ProcessService", "signal",
                serialize(event), marshaller.getFormat().getType(), new Object[]{containerId, signalName});
        CommandScript script = new CommandScript(singletonList((KieServerCommand) command));
        ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM", containerId).getResponses().get(0);
        throwExceptionOnFailure(response);
    }
}
@Override
public List<String> getAvailableSignals(String containerId, Long processInstanceId) {
// Returns the signal names the given process instance can currently receive.
Object signals = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
// Fetched as a raw Object: depending on the marshalling format the payload
// may arrive wrapped (see the Wrapped check below).
signals = makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_SIGNALS_GET_URI, valuesMap), Object.class);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "getAvailableSignals", marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId}) ) );
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
// JMS carries the result as a marshalled string; deserialize before unwrapping.
signals = deserialize(response.getResult(), Object.class);
}
// Unwrap the marshaller envelope, if present, before the unchecked cast.
if (signals instanceof Wrapped) {
return (List<String>) ((Wrapped)signals).unwrap();
}
return (List<String>) signals;
}
@Override
public void setProcessVariable(String containerId, Long processInstanceId, String variableId, Object value) {
    // Set a single named variable on a running process instance.
    if (config.isRest()) {
        // REST transport: PUT the new value to the per-variable endpoint.
        Map<String, Object> urlValues = new HashMap<String, Object>();
        urlValues.put(CONTAINER_ID, containerId);
        urlValues.put(PROCESS_INST_ID, processInstanceId);
        urlValues.put(VAR_NAME, variableId);
        String url = build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_VAR_PUT_URI, urlValues);
        makeHttpPutRequestAndCreateCustomResponse(url, value, String.class, getHeaders(null));
    } else {
        // JMS transport: serialize the value into the command payload.
        DescriptorCommand command = new DescriptorCommand("ProcessService", "setProcessVariable",
                serialize(value), marshaller.getFormat().getType(),
                new Object[]{containerId, processInstanceId, variableId});
        CommandScript script = new CommandScript(singletonList((KieServerCommand) command));
        ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM", containerId).getResponses().get(0);
        throwExceptionOnFailure(response);
    }
}
@Override
public void setProcessVariables(String containerId, Long processInstanceId, Map<String, Object> variables) {
// Set several variables at once on a running process instance.
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
// The variables map itself is the POST body; this overload supplies no extra headers.
makeHttpPostRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_VARS_POST_URI, valuesMap), variables,
String.class);
} else {
// JMS: the map is serialized into the command payload; ids travel as arguments.
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "setProcessVariables", serialize(variables), marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId})));
ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
}
}
@Override
public ProcessInstance getProcessInstance(String containerId, Long processInstanceId) {
    // Load a process instance by id; this overload never includes variables
    // (the JMS command is issued with withVars = false).
    if (config.isRest()) {
        Map<String, Object> urlValues = new HashMap<String, Object>();
        urlValues.put(CONTAINER_ID, containerId);
        urlValues.put(PROCESS_INST_ID, processInstanceId);
        String url = build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_GET_URI, urlValues);
        return makeHttpGetRequestAndCreateCustomResponse(url, ProcessInstance.class);
    }
    DescriptorCommand command = new DescriptorCommand("ProcessService", "getProcessInstance",
            marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId, false});
    CommandScript script = new CommandScript(singletonList((KieServerCommand) command));
    ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM", containerId).getResponses().get(0);
    throwExceptionOnFailure(response);
    if (shouldReturnWithNullResponse(response)) {
        return null;
    }
    // JMS responses carry the instance as a marshalled string; deserialize it here.
    return deserialize(response.getResult(), ProcessInstance.class);
}
@Override
public ProcessInstance getProcessInstance(String containerId, Long processInstanceId, boolean withVars) {
// Variant of getProcessInstance that can also load the instance's variables.
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
// withVars is appended as a query parameter on top of the templated URL.
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_GET_URI, valuesMap) + "?withVars=" + withVars , ProcessInstance.class);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "getProcessInstance", marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId, withVars}) ) );
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
// JMS responses carry the instance as a marshalled string; deserialize it here.
return deserialize(response.getResult(), ProcessInstance.class);
}
}
@Override
public void completeWorkItem(String containerId, Long processInstanceId, Long id, Map<String, Object> results) {
// Complete a work item, optionally passing result data back to the process.
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
valuesMap.put(WORK_ITEM_ID, id);
makeHttpPutRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_WORK_ITEM_COMPLETE_PUT_URI, valuesMap), results,
String.class, getHeaders(null));
} else {
// safeMap(...) presumably normalizes a null results map before serialization —
// it is defined elsewhere in this client; confirm if relying on null handling.
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "completeWorkItem", serialize(safeMap(results)), marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId, id})));
ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
}
}
@Override
public void abortWorkItem(String containerId, Long processInstanceId, Long id) {
    // Abort (cancel) a single work item belonging to the given process instance.
    if (config.isRest()) {
        Map<String, Object> urlValues = new HashMap<String, Object>();
        urlValues.put(CONTAINER_ID, containerId);
        urlValues.put(PROCESS_INST_ID, processInstanceId);
        urlValues.put(WORK_ITEM_ID, id);
        String url = build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_WORK_ITEM_ABORT_PUT_URI, urlValues);
        // An abort carries no request body, hence the null payload.
        makeHttpPutRequestAndCreateCustomResponse(url, null, String.class, getHeaders(null));
    } else {
        // JMS transport: no payload to serialize; ids travel as command arguments.
        DescriptorCommand command = new DescriptorCommand("ProcessService", "abortWorkItem",
                new Object[]{containerId, processInstanceId, id});
        CommandScript script = new CommandScript(singletonList((KieServerCommand) command));
        ServiceResponse<?> response = (ServiceResponse<?>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM", containerId).getResponses().get(0);
        throwExceptionOnFailure(response);
    }
}
@Override
public WorkItemInstance getWorkItem(String containerId, Long processInstanceId, Long id) {
// Fetch a single work item by id within the given process instance.
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
valuesMap.put(WORK_ITEM_ID, id);
return makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_WORK_ITEM_BY_ID_GET_URI, valuesMap), WorkItemInstance.class);
} else {
CommandScript script = new CommandScript(singletonList(
(KieServerCommand) new DescriptorCommand( "ProcessService", "getWorkItem", marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId, id}) ) );
ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM", containerId ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
// JMS responses carry the work item as a marshalled string; deserialize it here.
return deserialize(response.getResult(), WorkItemInstance.class);
}
}
@Override
public List<WorkItemInstance> getWorkItemByProcessInstance(String containerId, Long processInstanceId) {
    // List all work items attached to one process instance.
    WorkItemInstanceList workItems;
    if (config.isRest()) {
        Map<String, Object> urlValues = new HashMap<String, Object>();
        urlValues.put(CONTAINER_ID, containerId);
        urlValues.put(PROCESS_INST_ID, processInstanceId);
        String url = build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_WORK_ITEMS_BY_PROC_INST_ID_GET_URI, urlValues);
        workItems = makeHttpGetRequestAndCreateCustomResponse(url, WorkItemInstanceList.class);
    } else {
        DescriptorCommand command = new DescriptorCommand("ProcessService", "getWorkItemByProcessInstance",
                marshaller.getFormat().getType(), new Object[]{containerId, processInstanceId});
        CommandScript script = new CommandScript(singletonList((KieServerCommand) command));
        ServiceResponse<String> response = (ServiceResponse<String>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM", containerId).getResponses().get(0);
        throwExceptionOnFailure(response);
        if (shouldReturnWithNullResponse(response)) {
            return null;
        }
        workItems = deserialize(response.getResult(), WorkItemInstanceList.class);
    }
    // Normalize a missing/empty payload to an empty list for callers.
    return (workItems == null || workItems.getWorkItems() == null)
            ? Collections.<WorkItemInstance>emptyList()
            : Arrays.asList(workItems.getWorkItems());
}
@Override
public List<NodeInstance> findActiveNodeInstances(String containerId, Long processInstanceId, Integer page, Integer pageSize) {
// Node-instance history filtered to currently-active entries only.
NodeInstanceList result = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
// activeOnly=true narrows the shared node-instances endpoint to active entries.
String queryString = getPagingQueryString("?activeOnly=true", page, pageSize);
result = makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCES_NODE_INSTANCES_GET_URI, valuesMap) + queryString, NodeInstanceList.class);
} else {
// JMS: the true/false flags mirror the REST activeOnly/completedOnly filters
// by position (presumably active=true, completed=false) — see sibling queries.
CommandScript script = new CommandScript(singletonList((KieServerCommand)
new DescriptorCommand( "QueryService", "getProcessInstanceHistory", new Object[]{processInstanceId, true, false, page, pageSize}) ) );
ServiceResponse<NodeInstanceList> response = (ServiceResponse<NodeInstanceList>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM" ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = response.getResult();
}
// Normalize a missing/empty payload to an empty list for callers.
if (result != null && result.getNodeInstances() != null) {
return Arrays.asList(result.getNodeInstances());
}
return Collections.emptyList();
}
@Override
public List<NodeInstance> findCompletedNodeInstances(String containerId, Long processInstanceId, Integer page, Integer pageSize) {
// Node-instance history filtered to completed entries only.
NodeInstanceList result = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
// completedOnly=true narrows the shared node-instances endpoint to completed entries.
String queryString = getPagingQueryString("?completedOnly=true", page, pageSize);
result = makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCES_NODE_INSTANCES_GET_URI, valuesMap) + queryString, NodeInstanceList.class);
} else {
// JMS: flags by position (presumably active=false, completed=true) — the mirror
// image of findActiveNodeInstances above.
CommandScript script = new CommandScript(singletonList((KieServerCommand)
new DescriptorCommand( "QueryService", "getProcessInstanceHistory", new Object[]{processInstanceId, false, true, page, pageSize}) ) );
ServiceResponse<NodeInstanceList> response = (ServiceResponse<NodeInstanceList>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM" ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = response.getResult();
}
// Normalize a missing/empty payload to an empty list for callers.
if (result != null && result.getNodeInstances() != null) {
return Arrays.asList(result.getNodeInstances());
}
return Collections.emptyList();
}
@Override
public List<NodeInstance> findNodeInstancesByType(String containerId, Long processInstanceId, String entryType, Integer page, Integer pageSize) {
// Node-instance history filtered by history entry type.
NodeInstanceList result = null;
if (config.isRest()) {
Map<String, Object> valuesMap = new HashMap<>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
// entryType is passed as the PROCESS_INST_HISTORY_TYPE query parameter.
String queryString = getPagingQueryString("?" + RestURI.PROCESS_INST_HISTORY_TYPE + "=" + entryType, page, pageSize);
result = makeHttpGetRequestAndCreateCustomResponse(build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCES_NODE_INSTANCES_GET_URI, valuesMap) + queryString, NodeInstanceList.class);
} else {
// Unlike the sibling queries, arguments are passed varargs-style here
// (no explicit Object[] wrapper around the command arguments).
CommandScript script = new CommandScript(singletonList((KieServerCommand) new DescriptorCommand("QueryService", "getProcessInstanceFullHistoryByType",
processInstanceId, entryType, page, pageSize)));
ServiceResponse<NodeInstanceList> response = (ServiceResponse<NodeInstanceList>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM").getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = response.getResult();
}
// Normalize a missing/empty payload to an empty list for callers.
if (result != null && result.getNodeInstances() != null) {
return Arrays.asList(result.getNodeInstances());
}
return Collections.emptyList();
}
@Override
public List<NodeInstance> findNodeInstances(String containerId, Long processInstanceId, Integer page, Integer pageSize) {
    // Full node-instance history (active and completed) for one process instance.
    NodeInstanceList nodeInstances;
    if (config.isRest()) {
        Map<String, Object> urlValues = new HashMap<String, Object>();
        urlValues.put(CONTAINER_ID, containerId);
        urlValues.put(PROCESS_INST_ID, processInstanceId);
        // An empty base query string (no filter flags) selects the complete history.
        String url = build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCES_NODE_INSTANCES_GET_URI, urlValues)
                + getPagingQueryString("", page, pageSize);
        nodeInstances = makeHttpGetRequestAndCreateCustomResponse(url, NodeInstanceList.class);
    } else {
        DescriptorCommand command = new DescriptorCommand("QueryService", "getProcessInstanceHistory",
                new Object[]{processInstanceId, true, true, page, pageSize});
        CommandScript script = new CommandScript(singletonList((KieServerCommand) command));
        ServiceResponse<NodeInstanceList> response = (ServiceResponse<NodeInstanceList>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM").getResponses().get(0);
        throwExceptionOnFailure(response);
        if (shouldReturnWithNullResponse(response)) {
            return null;
        }
        nodeInstances = response.getResult();
    }
    // Normalize a missing/empty payload to an empty list for callers.
    return (nodeInstances == null || nodeInstances.getNodeInstances() == null)
            ? Collections.<NodeInstance>emptyList()
            : Arrays.asList(nodeInstances.getNodeInstances());
}
@Override
public List<VariableInstance> findVariablesCurrentState(String containerId, Long processInstanceId) {
// Current (latest) value of every variable on the given process instance.
VariableInstanceList result = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
result = makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_VAR_INSTANCES_GET_URI, valuesMap), VariableInstanceList.class);
} else {
// JMS path routes via QueryService; only the instance id is needed.
CommandScript script = new CommandScript(singletonList((KieServerCommand)
new DescriptorCommand( "QueryService", "getVariablesCurrentState", new Object[]{processInstanceId}) ) );
ServiceResponse<VariableInstanceList> response = (ServiceResponse<VariableInstanceList>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM" ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = response.getResult();
}
// Normalize a missing/empty payload to an empty list for callers.
if (result != null && result.getVariableInstances() != null) {
return Arrays.asList(result.getVariableInstances());
}
return Collections.emptyList();
}
@Override
public List<VariableInstance> findVariableHistory(String containerId, Long processInstanceId, String variableName, Integer page, Integer pageSize) {
// Paged history of one named variable's values on a process instance.
VariableInstanceList result = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, processInstanceId);
valuesMap.put(VAR_NAME, variableName);
String queryString = getPagingQueryString("", page, pageSize);
result = makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCE_VAR_INSTANCE_BY_VAR_NAME_GET_URI, valuesMap) + queryString, VariableInstanceList.class);
} else {
CommandScript script = new CommandScript(singletonList((KieServerCommand)
new DescriptorCommand( "QueryService", "getVariableHistory", new Object[]{processInstanceId, variableName, page, pageSize}) ) );
ServiceResponse<VariableInstanceList> response = (ServiceResponse<VariableInstanceList>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM" ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = response.getResult();
}
// Normalize a missing/empty payload to an empty list for callers.
if (result != null && result.getVariableInstances() != null) {
return Arrays.asList(result.getVariableInstances());
}
return Collections.emptyList();
}
@Override
public List<ProcessInstance> findProcessInstancesByParent(String containerId, Long parentProcessInstanceId, Integer page, Integer pageSize) {
// Convenience overload: no status filter.
return findProcessInstancesByParent(containerId, parentProcessInstanceId, null, page, pageSize);
}
@Override
public List<ProcessInstance> findProcessInstancesByParent(String containerId, Long parentProcessInstanceId, List<Integer> status, Integer page, Integer pageSize) {
// Convenience overload: empty sort column with ascending order.
return findProcessInstancesByParent(containerId, parentProcessInstanceId, status, page, pageSize, "", true);
}
@Override
public List<ProcessInstance> findProcessInstancesByParent(String containerId, Long parentProcessInstanceId, List<Integer> status, Integer page, Integer pageSize, String sort, boolean sortOrder) {
// Paged, optionally status-filtered and sorted, lookup of child process instances.
ProcessInstanceList result = null;
if( config.isRest() ) {
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
valuesMap.put(PROCESS_INST_ID, parentProcessInstanceId);
// Status values are appended as repeated "status" query parameters on top of sort params.
String statusQueryString = getAdditionalParams("?sort="+sort+"&sortOrder="+sortOrder, "status", status);
String queryString = getPagingQueryString(statusQueryString, page, pageSize);
result = makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCES_BY_PARENT_GET_URI, valuesMap) + queryString, ProcessInstanceList.class);
} else {
// safeList(...) presumably normalizes a null status list — defined elsewhere in this client.
CommandScript script = new CommandScript(singletonList((KieServerCommand)
new DescriptorCommand( "ProcessService", "getProcessInstancesByParent", new Object[]{parentProcessInstanceId, safeList(status), page, pageSize, sort, sortOrder}) ) );
ServiceResponse<ProcessInstanceList> response = (ServiceResponse<ProcessInstanceList>) executeJmsCommand( script, DescriptorCommand.class.getName(), "BPM" ).getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = response.getResult();
}
// Normalize a missing/empty payload to an empty list for callers.
if (result != null && result.getProcessInstances() != null) {
return Arrays.asList(result.getProcessInstances());
}
return Collections.emptyList();
}
@Override
public List<ProcessInstance> findProcessInstances(String containerId, Integer page, Integer pageSize) {
// Convenience overload: empty sort column with ascending order.
return findProcessInstances(containerId, page, pageSize, "", true);
}
/**
 * Returns one page of process instances for the given container.
 *
 * @param containerId container (deployment) id whose instances are listed
 * @param page        page index passed through to the paging query helper
 * @param pageSize    number of results per page
 * @param sort        field name to sort by (empty string for the server default)
 * @param sortOrder   true for ascending, false for descending
 * @return the matching instances, an empty list when there are none, or null
 *         when the JMS response indicates no result should be returned
 */
@Override
public List<ProcessInstance> findProcessInstances(String containerId, Integer page, Integer pageSize, String sort, boolean sortOrder) {
ProcessInstanceList result = null;
if (config.isRest()) {
// REST transport: sort/paging travel as query parameters on the container endpoint.
Map<String, Object> valuesMap = new HashMap<String, Object>();
valuesMap.put(CONTAINER_ID, containerId);
String queryString = getPagingQueryString("?sort=" + sort + "&sortOrder=" + sortOrder, page, pageSize);
result = makeHttpGetRequestAndCreateCustomResponse(
build(loadBalancer.getUrl(), PROCESS_URI + "/" + PROCESS_INSTANCES_BY_CONTAINER_GET_URI, valuesMap) + queryString, ProcessInstanceList.class);
} else {
// Fix: typed ArrayList<Integer> instead of the raw `new ArrayList()` (no status
// filter, matching the List<Integer> status parameter used by sibling queries).
// Identical behavior after erasure; removes the raw-type/unchecked warning.
CommandScript script = new CommandScript(singletonList((KieServerCommand)
new DescriptorCommand("QueryService", "getProcessInstances", new Object[]{new ArrayList<Integer>(), "", "", page, pageSize, sort, sortOrder})));
ServiceResponse<ProcessInstanceList> response = (ServiceResponse<ProcessInstanceList>) executeJmsCommand(script, DescriptorCommand.class.getName(), "BPM").getResponses().get(0);
throwExceptionOnFailure(response);
if (shouldReturnWithNullResponse(response)) {
return null;
}
result = response.getResult();
}
// Normalize a missing/empty payload to an empty list for callers.
if (result != null && result.getProcessInstances() != null) {
return Arrays.asList(result.getProcessInstances());
}
return Collections.emptyList();
}
}
| |
package com.plusub.lib.view.photoview;
/*******************************************************************************
* Copyright 2011, 2012 Chris Banes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.ScaleGestureDetector.OnScaleGestureListener;
import android.view.VelocityTracker;
import android.view.ViewConfiguration;
/**
 * Gesture detector that degrades gracefully across Android API levels:
 * Cupcake (API 3) supports drag/fling only, Eclair (API 5+) adds
 * multi-touch pointer tracking, Froyo (API 8+) adds pinch-to-scale.
 * Recognized gestures are reported through the supplied {@link OnGestureListener}.
 */
public abstract class VersionedGestureDetector {
static final String LOG_TAG = "VersionedGestureDetector";
// Callback target for drag/fling/scale notifications; assigned by newInstance().
OnGestureListener mListener;
/**
 * Factory method: picks the most capable detector implementation the
 * running device's SDK level supports and wires in the listener.
 */
public static VersionedGestureDetector newInstance(Context context, OnGestureListener listener) {
final int sdkVersion = Build.VERSION.SDK_INT;
VersionedGestureDetector detector = null;
if (sdkVersion < Build.VERSION_CODES.ECLAIR) {
detector = new CupcakeDetector(context);
} else if (sdkVersion < Build.VERSION_CODES.FROYO) {
detector = new EclairDetector(context);
} else {
detector = new FroyoDetector(context);
}
detector.mListener = listener;
return detector;
}
// Feed a touch event to the detector; returns true when the event was consumed.
public abstract boolean onTouchEvent(MotionEvent ev);
// True while a pinch/scale gesture is in progress (always false pre-Froyo).
public abstract boolean isScaling();
/** Callback interface for the gestures this detector recognizes. */
public static interface OnGestureListener {
public void onDrag(float dx, float dy);
public void onFling(float startX, float startY, float velocityX, float velocityY);
public void onScale(float scaleFactor, float focusX, float focusY);
}
/**
 * Base implementation (API 3): single-pointer drag and fling detection
 * using a VelocityTracker and the platform touch-slop/fling thresholds.
 */
private static class CupcakeDetector extends VersionedGestureDetector {
// Last observed touch position, used to compute per-move deltas.
float mLastTouchX;
float mLastTouchY;
// Minimum movement (px) before a touch is treated as a drag.
final float mTouchSlop;
// Minimum release velocity before a lift is treated as a fling.
final float mMinimumVelocity;
public CupcakeDetector(Context context) {
final ViewConfiguration configuration = ViewConfiguration.get(context);
mMinimumVelocity = configuration.getScaledMinimumFlingVelocity();
mTouchSlop = configuration.getScaledTouchSlop();
}
// Obtained on ACTION_DOWN, recycled on ACTION_UP/ACTION_CANCEL.
private VelocityTracker mVelocityTracker;
private boolean mIsDragging;
// Coordinate accessors; overridden by EclairDetector for multi-touch support.
float getActiveX(MotionEvent ev) {
return ev.getX();
}
float getActiveY(MotionEvent ev) {
return ev.getY();
}
public boolean isScaling() {
// Scaling is not available before Froyo.
return false;
}
@Override
public boolean onTouchEvent(MotionEvent ev) {
switch (ev.getAction()) {
case MotionEvent.ACTION_DOWN: {
// Start velocity tracking and remember the initial touch point.
mVelocityTracker = VelocityTracker.obtain();
mVelocityTracker.addMovement(ev);
mLastTouchX = getActiveX(ev);
mLastTouchY = getActiveY(ev);
mIsDragging = false;
break;
}
case MotionEvent.ACTION_MOVE: {
final float x = getActiveX(ev);
final float y = getActiveY(ev);
final float dx = x - mLastTouchX, dy = y - mLastTouchY;
if (!mIsDragging) {
// Use Pythagoras to see if drag length is larger than
// touch slop
mIsDragging = Math.sqrt((dx * dx) + (dy * dy)) >= mTouchSlop;
}
if (mIsDragging) {
mListener.onDrag(dx, dy);
mLastTouchX = x;
mLastTouchY = y;
if (null != mVelocityTracker) {
mVelocityTracker.addMovement(ev);
}
}
break;
}
case MotionEvent.ACTION_CANCEL: {
// Recycle Velocity Tracker
if (null != mVelocityTracker) {
mVelocityTracker.recycle();
mVelocityTracker = null;
}
break;
}
case MotionEvent.ACTION_UP: {
// Only a release that ends a drag can become a fling.
if (mIsDragging) {
if (null != mVelocityTracker) {
mLastTouchX = getActiveX(ev);
mLastTouchY = getActiveY(ev);
// Compute velocity within the last 1000ms
mVelocityTracker.addMovement(ev);
mVelocityTracker.computeCurrentVelocity(1000);
final float vX = mVelocityTracker.getXVelocity(), vY = mVelocityTracker.getYVelocity();
// If the velocity is greater than minVelocity, call
// listener
if (Math.max(Math.abs(vX), Math.abs(vY)) >= mMinimumVelocity) {
// Velocities are negated before being reported to the listener.
mListener.onFling(mLastTouchX, mLastTouchY, -vX, -vY);
}
}
}
// Recycle Velocity Tracker
if (null != mVelocityTracker) {
mVelocityTracker.recycle();
mVelocityTracker = null;
}
break;
}
}
return true;
}
}
/**
 * API 5+ variant: tracks a single "active" pointer id so dragging keeps
 * working correctly through multi-touch pointer up/down transitions.
 */
@TargetApi(5)
private static class EclairDetector extends CupcakeDetector {
private static final int INVALID_POINTER_ID = -1;
private int mActivePointerId = INVALID_POINTER_ID;
private int mActivePointerIndex = 0;
public EclairDetector(Context context) {
super(context);
}
@Override
float getActiveX(MotionEvent ev) {
try {
return ev.getX(mActivePointerIndex);
} catch (Exception e) {
// The cached pointer index can be stale for this event; fall back to pointer 0.
return ev.getX();
}
}
@Override
float getActiveY(MotionEvent ev) {
try {
return ev.getY(mActivePointerIndex);
} catch (Exception e) {
return ev.getY();
}
}
@Override
public boolean onTouchEvent(MotionEvent ev) {
final int action = ev.getAction();
switch (action & MotionEvent.ACTION_MASK) {
case MotionEvent.ACTION_DOWN:
// First finger down becomes the active pointer.
mActivePointerId = ev.getPointerId(0);
break;
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP:
mActivePointerId = INVALID_POINTER_ID;
break;
case MotionEvent.ACTION_POINTER_UP:
final int pointerIndex = (ev.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
final int pointerId = ev.getPointerId(pointerIndex);
if (pointerId == mActivePointerId) {
// This was our active pointer going up. Choose a new
// active pointer and adjust accordingly.
final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
mActivePointerId = ev.getPointerId(newPointerIndex);
mLastTouchX = ev.getX(newPointerIndex);
mLastTouchY = ev.getY(newPointerIndex);
}
break;
}
// Resolve the active pointer id to its index for this event (pointer 0 when none).
mActivePointerIndex = ev.findPointerIndex(mActivePointerId != INVALID_POINTER_ID ? mActivePointerId : 0);
return super.onTouchEvent(ev);
}
}
/**
 * API 8+ variant: adds pinch-to-scale via the platform ScaleGestureDetector
 * on top of the Eclair drag/fling behavior.
 */
@TargetApi(8)
private static class FroyoDetector extends EclairDetector {
private final ScaleGestureDetector mDetector;
// Needs to be an inner class so that we don't hit
// VerifyError's on API 4.
private final OnScaleGestureListener mScaleListener = new OnScaleGestureListener() {
@Override
public boolean onScale(ScaleGestureDetector detector) {
mListener.onScale(detector.getScaleFactor(), detector.getFocusX(), detector.getFocusY());
return true;
}
@Override
public boolean onScaleBegin(ScaleGestureDetector detector) {
return true;
}
@Override
public void onScaleEnd(ScaleGestureDetector detector) {
// NO-OP
}
};
public FroyoDetector(Context context) {
super(context);
mDetector = new ScaleGestureDetector(context, mScaleListener);
}
@Override
public boolean isScaling() {
return mDetector.isInProgress();
}
@Override
public boolean onTouchEvent(MotionEvent ev) {
// Let the scale detector inspect every event, then run drag/fling handling.
mDetector.onTouchEvent(ev);
return super.onTouchEvent(ev);
}
}
}
| |
/*
Copyright [2013-2014] eBay Software Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package models.data;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import models.utils.VarUtils;
/**
*
* @author ypei
*
*/
public class AgentCommandMetadata implements Comparable<AgentCommandMetadata> {
private String agentCommandType; // this is the PK
private String httpMethod;
private String requestUrlPostfix;
private String requestContentTemplate;
private String requestProtocol;
private String requestPort;
private long pauseIntervalWorkerMillis;
private int responseExtractIndexStart;
private int responseExtractIndexEnd;
// 20131013
private int maxConcurrency;
// 20131213
private String httpHeaderType;
public AgentCommandMetadata(String agentCommandType, String httpMethod,
String requestUrlPostfix, String requestContentTemplate,
String requestProtocol, String requestPort, int maxConcurrency,
long pauseIntervalWorkerMillis, int responseExtractIndexStart,
int responseExtractIndexEnd, String httpHeaderType
) {
super();
this.agentCommandType = agentCommandType;
this.httpMethod = httpMethod;
this.requestUrlPostfix = requestUrlPostfix;
this.requestContentTemplate = requestContentTemplate;
this.requestProtocol = requestProtocol;
this.requestPort = requestPort;
this.maxConcurrency = maxConcurrency;
this.pauseIntervalWorkerMillis = pauseIntervalWorkerMillis;
this.responseExtractIndexStart = responseExtractIndexStart;
this.responseExtractIndexEnd = responseExtractIndexEnd;
this.httpHeaderType = httpHeaderType;
}
public String getHttpHeaderType() {
return httpHeaderType;
}
public void setHttpHeaderType(String httpHeaderType) {
this.httpHeaderType = httpHeaderType;
}
public int getMaxConcurrency() {
return maxConcurrency;
}
public void setMaxConcurrency(int maxConcurrency) {
this.maxConcurrency = maxConcurrency;
}
/**
* THIS DOES NOT COMPLETELY COPY!! MISSING requestContentTemplate, LIST
* ELEMENT for easy JSON passing to html template!!
*
* @param other
*/
public AgentCommandMetadata(AgentCommandMetadata other
) {
super();
this.agentCommandType = other.agentCommandType;
this.httpMethod = other.httpMethod;
// this.requestUrlPostfix = other.requestUrlPostfix;
// this.requestContentTemplate = other.requestContentTemplate;
this.requestProtocol = other.requestProtocol;
this.requestPort = other.requestPort;
this.pauseIntervalWorkerMillis = other.pauseIntervalWorkerMillis;
this.responseExtractIndexStart = other.responseExtractIndexStart;
this.responseExtractIndexEnd = other.responseExtractIndexEnd;
this.maxConcurrency = other.maxConcurrency;
}
public String getRequestProtocol() {
return requestProtocol;
}
public void setRequestProtocol(String requestProtocol) {
this.requestProtocol = requestProtocol;
}
public String getRequestPort() {
return requestPort;
}
public void setRequestPort(String requestPort) {
this.requestPort = requestPort;
}
public String getAgentCommandType() {
return agentCommandType;
}
public void setAgentCommandType(String agentCommandType) {
this.agentCommandType = agentCommandType;
}
public String getHttpMethod() {
return httpMethod;
}
public void setHttpMethod(String httpMethod) {
this.httpMethod = httpMethod;
}
public String getRequestUrlPostfix() {
return requestUrlPostfix;
}
public void setRequestUrlPostfix(String requestUrlPostfix) {
this.requestUrlPostfix = requestUrlPostfix;
}
public String getRequestContentTemplate() {
return requestContentTemplate;
}
public void setRequestContentTemplate(String requestContentTemplate) {
this.requestContentTemplate = requestContentTemplate;
}
public long getPauseIntervalWorkerMillis() {
return pauseIntervalWorkerMillis;
}
public void setPauseIntervalWorkerMillis(long pauseIntervalWorkerMillis) {
this.pauseIntervalWorkerMillis = pauseIntervalWorkerMillis;
}
public int getResponseExtractIndexStart() {
return responseExtractIndexStart;
}
public void setResponseExtractIndexStart(int responseExtractIndexStart) {
this.responseExtractIndexStart = responseExtractIndexStart;
}
public int getResponseExtractIndexEnd() {
return responseExtractIndexEnd;
}
public void setResponseExtractIndexEnd(int responseExtractIndexEnd) {
this.responseExtractIndexEnd = responseExtractIndexEnd;
}
@Override
public String toString() {
return "AgentCommandMetadata [agentCommandType=" + agentCommandType
+ ", httpMethod=" + httpMethod + ", requestUrlPostfix="
+ requestUrlPostfix + ", requestContentTemplate="
+ requestContentTemplate + ", requestProtocol="
+ requestProtocol + ", requestPort=" + requestPort + "]";
}
public static String replaceFullRequestContent(
String requestContentTemplate, String replacementString) {
return (requestContentTemplate.replace(
VarUtils.AGENT_COMMAND_VAR_DEFAULT_REQUEST_CONTENT,
replacementString));
}
// only replace "$AM_FULL_CONTENT" by ""
public static String replaceDefaultFullRequestContent(
String requestContentTemplate) {
return replaceFullRequestContent(requestContentTemplate, "");
}
/**
* THIS DOES NOT COMPLETELY COPY!! MISSING requestContentTemplate, LIST
* ELEMENT for easy JSON passing to html template!!
*
* @param other
*/
public static List<AgentCommandMetadata> convertMapToList(
Map<String, AgentCommandMetadata> map) {
List<AgentCommandMetadata> list = new ArrayList<AgentCommandMetadata>();
for (Entry<String, AgentCommandMetadata> entry : map.entrySet()) {
AgentCommandMetadata copyWithoutList = new AgentCommandMetadata(
entry.getValue());
list.add(copyWithoutList);
}
return list;
}
@Override
public int compareTo(AgentCommandMetadata o) {
String agentCommandTypeOther = ((AgentCommandMetadata) o).agentCommandType;
// ascending order
return this.agentCommandType.compareTo(agentCommandTypeOther);
}
}
| |
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.drm;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.common.truth.Truth.assertThat;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertThrows;
import android.os.Looper;
import androidx.annotation.Nullable;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.drm.DrmSessionManager.DrmSessionReference;
import com.google.android.exoplayer2.drm.ExoMediaDrm.AppManagedProvider;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.testutil.FakeExoMediaDrm;
import com.google.android.exoplayer2.testutil.TestUtil;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.shadows.ShadowLooper;
/** Tests for {@link DefaultDrmSessionManager} and {@link DefaultDrmSession}. */
// TODO: Test more branches:
// - Different sources for licenseServerUrl.
// - Multiple acquisitions & releases for same keys -> multiple requests.
// - Key denial.
@RunWith(AndroidJUnit4.class)
public class DefaultDrmSessionManagerTest {
// Test-only DRM scheme UUID, derived deterministically from fixed bytes.
private static final UUID DRM_SCHEME_UUID =
UUID.nameUUIDFromBytes(TestUtil.createByteArray(7, 8, 9));
// A single scheme-data entry for DRM_SCHEME_UUID with an arbitrary 3-byte payload.
private static final ImmutableList<DrmInitData.SchemeData> DRM_SCHEME_DATAS =
ImmutableList.of(
new DrmInitData.SchemeData(
DRM_SCHEME_UUID, MimeTypes.VIDEO_MP4, /* data= */ TestUtil.createByteArray(1, 2, 3)));
// Format carrying DRM_SCHEME_DATAS; used by most tests to trigger session acquisition.
private static final Format FORMAT_WITH_DRM_INIT_DATA =
new Format.Builder().setDrmInitData(new DrmInitData(DRM_SCHEME_DATAS)).build();
// Happy path: acquiring a session triggers a key load and the session ends up
// fully opened with the key reported as available.
@Test(timeout = 10_000)
public void acquireSession_triggersKeyLoadAndSessionIsOpened() throws Exception {
FakeExoMediaDrm.LicenseServer server =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DefaultDrmSessionManager manager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
.build(/* mediaDrmCallback= */ server);
manager.prepare();
manager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession session =
checkNotNull(
manager.acquireSession(/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(session);
// The key load completed, so the session is opened with keys and the key is available.
assertThat(session.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
assertThat(session.queryKeyStatus())
.containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
}
// With a 10s keepalive, a released session stays open until the keepalive
// timer fires, then transitions to RELEASED.
@Test(timeout = 10_000)
public void keepaliveEnabled_sessionsKeptForRequestedTime() throws Exception {
FakeExoMediaDrm.LicenseServer server =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager manager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
.setSessionKeepaliveMs(10_000)
.build(/* mediaDrmCallback= */ server);
manager.prepare();
manager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession session =
checkNotNull(
manager.acquireSession(/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(session);
assertThat(session.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
session.release(/* eventDispatcher= */ null);
// The caller's reference is gone but the keepalive still holds the session open.
assertThat(session.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
// Let the 10s keepalive expire; the manager then drops its internal reference.
ShadowLooper.idleMainLooper(10, SECONDS);
assertThat(session.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
// With keepalive disabled (C.TIME_UNSET), releasing the last external
// reference releases the session immediately.
@Test(timeout = 10_000)
public void keepaliveDisabled_sessionsReleasedImmediately() throws Exception {
FakeExoMediaDrm.LicenseServer server =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager manager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
.setSessionKeepaliveMs(C.TIME_UNSET)
.build(/* mediaDrmCallback= */ server);
manager.prepare();
manager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession session =
checkNotNull(
manager.acquireSession(/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(session);
session.release(/* eventDispatcher= */ null);
// No keepalive reference exists, so the release takes effect at once.
assertThat(session.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
// Releasing the manager immediately drops its keepalive references, releasing
// any session that was only being kept alive by the keepalive timer.
@Test(timeout = 10_000)
public void managerRelease_allKeepaliveSessionsImmediatelyReleased() throws Exception {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
.setSessionKeepaliveMs(10_000)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(drmSession);
drmSession.release(/* eventDispatcher= */ null);
// Only the manager's keepalive reference is holding the session open now.
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
drmSessionManager.release();
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
// With keepalive disabled the manager holds no session references, so
// releasing the manager must not release externally-held sessions.
@Test(timeout = 10_000)
public void managerRelease_keepaliveDisabled_doesntReleaseAnySessions() throws Exception {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
.setSessionKeepaliveMs(C.TIME_UNSET)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(drmSession);
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
// Release the manager, the session should still be open (though it's unusable because
// the underlying ExoMediaDrm is released).
drmSessionManager.release();
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
}
// Tracks the ExoMediaDrm reference count across manager prepare/release cycles:
// the underlying ExoMediaDrm must stay alive until the last session is released.
@Test(timeout = 10_000)
public void managerRelease_mediaDrmNotReleasedUntilLastSessionReleased() throws Exception {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
FakeExoMediaDrm exoMediaDrm = new FakeExoMediaDrm();
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(exoMediaDrm))
.setSessionKeepaliveMs(10_000)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
drmSessionManager.release();
// The manager is now in a 'releasing' state because the session is still active - so the
// ExoMediaDrm instance should still be active (with 1 reference held by this test, and 1 held
// by the manager).
assertThat(exoMediaDrm.getReferenceCount()).isEqualTo(2);
// And re-preparing the session shouldn't acquire another reference.
drmSessionManager.prepare();
assertThat(exoMediaDrm.getReferenceCount()).isEqualTo(2);
drmSessionManager.release();
drmSession.release(/* eventDispatcher= */ null);
// The final session has been released, so now the ExoMediaDrm should be released too.
assertThat(exoMediaDrm.getReferenceCount()).isEqualTo(1);
// Re-preparing the fully released manager should now acquire another ExoMediaDrm reference.
drmSessionManager.prepare();
assertThat(exoMediaDrm.getReferenceCount()).isEqualTo(2);
drmSessionManager.release();
exoMediaDrm.release();
}
// https://github.com/google/ExoPlayer/issues/9193
// Regression test: the manager must not double-release its keepalive reference
// (once on manager release and again when the keepalive timer fires).
@Test(timeout = 10_000)
public void
managerReleasedBeforeSession_keepaliveEnabled_managerOnlyReleasesOneKeepaliveReference()
throws Exception {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
FakeExoMediaDrm exoMediaDrm = new FakeExoMediaDrm.Builder().build();
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(exoMediaDrm))
.setSessionKeepaliveMs(10_000)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(drmSession);
// Release the manager (there's still an explicit reference to the session from acquireSession).
// This should immediately release the manager's internal keepalive session reference.
drmSessionManager.release();
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
// Ensure the manager doesn't release a *second* keepalive session reference after the timer
// expires.
ShadowLooper.idleMainLooper(10, SECONDS);
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
// Release the explicit session reference.
drmSession.release(/* eventDispatcher= */ null);
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
// When the ExoMediaDrm can only hold one concurrent session, the manager must
// eagerly release its keepalive reference to make room for a new acquisition.
@Test(timeout = 10_000)
public void maxConcurrentSessionsExceeded_allKeepAliveSessionsEagerlyReleased() throws Exception {
// Second init data distinct from DRM_SCHEME_DATAS so two sessions are required.
ImmutableList<DrmInitData.SchemeData> secondSchemeDatas =
ImmutableList.of(DRM_SCHEME_DATAS.get(0).copyWithData(TestUtil.createByteArray(4, 5, 6)));
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS, secondSchemeDatas);
Format secondFormatWithDrmInitData =
new Format.Builder().setDrmInitData(new DrmInitData(secondSchemeDatas)).build();
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(
DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm(/* maxConcurrentSessions= */ 1))
.setSessionKeepaliveMs(10_000)
.setMultiSession(true)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession firstDrmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(firstDrmSession);
firstDrmSession.release(/* eventDispatcher= */ null);
// All external references to firstDrmSession have been released, it's being kept alive by
// drmSessionManager's internal reference.
assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
DrmSession secondDrmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, secondFormatWithDrmInitData));
// The drmSessionManager had to release firstDrmSession in order to acquire secondDrmSession.
assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
waitForOpenedWithKeys(secondDrmSession);
assertThat(secondDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
}
// As above, but the first session is additionally held by a pre-acquired
// reference: both that and the keepalive reference must be eagerly released.
@Test(timeout = 10_000)
public void maxConcurrentSessionsExceeded_allPreacquiredAndKeepaliveSessionsEagerlyReleased()
throws Exception {
ImmutableList<DrmInitData.SchemeData> secondSchemeDatas =
ImmutableList.of(DRM_SCHEME_DATAS.get(0).copyWithData(TestUtil.createByteArray(4, 5, 6)));
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS, secondSchemeDatas);
Format secondFormatWithDrmInitData =
new Format.Builder().setDrmInitData(new DrmInitData(secondSchemeDatas)).build();
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(
DRM_SCHEME_UUID,
uuid -> new FakeExoMediaDrm.Builder().setMaxConcurrentSessions(1).build())
.setSessionKeepaliveMs(10_000)
.setMultiSession(true)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSessionReference firstDrmSessionReference =
checkNotNull(
drmSessionManager.preacquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
DrmSession firstDrmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(firstDrmSession);
firstDrmSession.release(/* eventDispatcher= */ null);
// The direct reference to firstDrmSession has been released, it's being kept alive by both
// firstDrmSessionReference and drmSessionManager's internal reference.
assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
DrmSession secondDrmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, secondFormatWithDrmInitData));
// The drmSessionManager had to release both it's internal keep-alive reference and the
// reference represented by firstDrmSessionReference in order to acquire secondDrmSession.
assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
waitForOpenedWithKeys(secondDrmSession);
assertThat(secondDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
// Not needed (because the manager has already released this reference) but we call it anyway
// for completeness.
firstDrmSessionReference.release();
// Clean-up
secondDrmSession.release(/* eventDispatcher= */ null);
drmSessionManager.release();
}
// Re-acquiring a session part-way through its keepalive window must return the
// same instance and cancel the pending keepalive timeout.
@Test(timeout = 10_000)
public void sessionReacquired_keepaliveTimeOutCancelled() throws Exception {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
.setSessionKeepaliveMs(10_000)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession firstDrmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(firstDrmSession);
firstDrmSession.release(/* eventDispatcher= */ null);
ShadowLooper.idleMainLooper(5, SECONDS);
// Acquire a session for the same init data 5s in to the 10s timeout (so expect the same
// instance).
DrmSession secondDrmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
assertThat(secondDrmSession).isSameInstanceAs(firstDrmSession);
// Let the timeout definitely expire, and check the session didn't get released.
ShadowLooper.idleMainLooper(10, SECONDS);
assertThat(secondDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
}
// Pre-acquisition should load keys in the background, so a later full
// acquisition finds the session already in OPENED_WITH_KEYS without a second
// key load.
@Test(timeout = 10_000)
public void preacquireSession_loadsKeysBeforeFullAcquisition() throws Exception {
// Counts onDrmKeysLoaded callbacks so the test can detect the (single) key load.
AtomicInteger keyLoadCount = new AtomicInteger(0);
DrmSessionEventListener.EventDispatcher eventDispatcher =
new DrmSessionEventListener.EventDispatcher();
eventDispatcher.addEventListener(
Util.createHandlerForCurrentLooper(),
new DrmSessionEventListener() {
@Override
public void onDrmKeysLoaded(
int windowIndex, @Nullable MediaSource.MediaPeriodId mediaPeriodId) {
keyLoadCount.incrementAndGet();
}
});
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
// Disable keepalive
.setSessionKeepaliveMs(C.TIME_UNSET)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSessionReference sessionReference =
drmSessionManager.preacquireSession(eventDispatcher, FORMAT_WITH_DRM_INIT_DATA);
// Wait for the key load event to propagate, indicating the pre-acquired session is in
// STATE_OPENED_WITH_KEYS.
while (keyLoadCount.get() == 0) {
// Allow the key response to be handled.
ShadowLooper.idleMainLooper();
}
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
// Without idling the main/playback looper, we assert the session is already in OPENED_WITH_KEYS
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
assertThat(keyLoadCount.get()).isEqualTo(1);
// After releasing our concrete session reference, the session is held open by the pre-acquired
// reference.
drmSession.release(/* eventDispatcher= */ null);
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
// Releasing the pre-acquired reference allows the session to be fully released.
sessionReference.release();
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
// Releasing a pre-acquired reference before the underlying session is even
// constructed must still fully release that session once it exists.
@Test(timeout = 10_000)
public void
preacquireSession_releaseBeforeUnderlyingAcquisitionCompletesReleasesSessionOnceAcquired()
throws Exception {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
// Disable keepalive
.setSessionKeepaliveMs(C.TIME_UNSET)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSessionReference sessionReference =
drmSessionManager.preacquireSession(/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA);
// Release the pre-acquired reference before the underlying session has had a chance to be
// constructed.
sessionReference.release();
// Acquiring the same session triggers a second key load (because the pre-acquired session was
// fully released).
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
// STATE_OPENED (not OPENED_WITH_KEYS) shows the pre-acquired key load didn't survive.
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED);
waitForOpenedWithKeys(drmSession);
drmSession.release(/* eventDispatcher= */ null);
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
// Releasing the manager before a pre-acquisition completes must cancel that
// acquisition entirely; a later acquisition starts from scratch.
@Test(timeout = 10_000)
public void preacquireSession_releaseManagerBeforeAcquisition_acquisitionDoesntHappen()
throws Exception {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
// Disable keepalive
.setSessionKeepaliveMs(C.TIME_UNSET)
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSessionReference sessionReference =
drmSessionManager.preacquireSession(/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA);
// Release the manager before the underlying session has had a chance to be constructed. This
// will release all pre-acquired sessions.
drmSessionManager.release();
// Allow the acquisition event to be handled on the main/playback thread.
ShadowLooper.idleMainLooper();
// Re-prepare the manager so we can fully acquire the same session, and check the previous
// pre-acquisition didn't do anything.
drmSessionManager.prepare();
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED);
waitForOpenedWithKeys(drmSession);
drmSession.release(/* eventDispatcher= */ null);
// If the (still unreleased) pre-acquired session above was linked to the same underlying
// session then the state would still be OPENED_WITH_KEYS.
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
// Release the pre-acquired session from above (this is a no-op, but we do it anyway for
// correctness).
sessionReference.release();
drmSessionManager.release();
}
// An EVENT_KEY_REQUIRED from the ExoMediaDrm must trigger a second license
// request for the same scheme data.
@Test(timeout = 10_000)
public void keyRefreshEvent_triggersKeyRefresh() throws Exception {
FakeExoMediaDrm exoMediaDrm = new FakeExoMediaDrm();
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(exoMediaDrm))
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DefaultDrmSession drmSession =
(DefaultDrmSession)
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(drmSession);
assertThat(licenseServer.getReceivedSchemeDatas()).hasSize(1);
// Fire a key-required event targeted at this session.
exoMediaDrm.triggerEvent(
drmSession::hasSessionId,
ExoMediaDrm.EVENT_KEY_REQUIRED,
/* extra= */ 0,
/* data= */ Util.EMPTY_BYTE_ARRAY);
while (licenseServer.getReceivedSchemeDatas().size() == 1) {
// Allow the key refresh event to be handled.
ShadowLooper.idleMainLooper();
}
// Two requests total, both for the same (single distinct) scheme data.
assertThat(licenseServer.getReceivedSchemeDatas()).hasSize(2);
assertThat(ImmutableSet.copyOf(licenseServer.getReceivedSchemeDatas())).hasSize(1);
drmSession.release(/* eventDispatcher= */ null);
drmSessionManager.release();
exoMediaDrm.release();
}
// Key refresh must still work while the manager is in its 'releasing' state
// (released but with an active session keeping it alive).
@Test(timeout = 10_000)
public void keyRefreshEvent_whileManagerIsReleasing_triggersKeyRefresh() throws Exception {
FakeExoMediaDrm exoMediaDrm = new FakeExoMediaDrm();
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(exoMediaDrm))
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DefaultDrmSession drmSession =
(DefaultDrmSession)
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
waitForOpenedWithKeys(drmSession);
assertThat(licenseServer.getReceivedSchemeDatas()).hasSize(1);
// Put the manager into the 'releasing' state before firing the event.
drmSessionManager.release();
exoMediaDrm.triggerEvent(
drmSession::hasSessionId,
ExoMediaDrm.EVENT_KEY_REQUIRED,
/* extra= */ 0,
/* data= */ Util.EMPTY_BYTE_ARRAY);
while (licenseServer.getReceivedSchemeDatas().size() == 1) {
// Allow the key refresh event to be handled.
ShadowLooper.idleMainLooper();
}
// Two requests total, both for the same (single distinct) scheme data.
assertThat(licenseServer.getReceivedSchemeDatas()).hasSize(2);
assertThat(ImmutableSet.copyOf(licenseServer.getReceivedSchemeDatas())).hasSize(1);
drmSession.release(/* eventDispatcher= */ null);
exoMediaDrm.release();
}
// If openSession throws NotProvisionedException, the manager must provision
// the device and then retry opening the session.
@Test
public void
deviceNotProvisioned_exceptionThrownFromOpenSession_provisioningDoneAndOpenSessionRetried() {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DefaultDrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(
DRM_SCHEME_UUID,
uuid -> new FakeExoMediaDrm.Builder().setProvisionsRequired(1).build())
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
// Confirm that opening the session threw NotProvisionedException (otherwise state would be
// OPENED)
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENING);
waitForOpenedWithKeys(drmSession);
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
assertThat(drmSession.queryKeyStatus())
.containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(1);
}
// Same provisioning flow, but NotProvisionedException is thrown later, from
// getKeyRequest (so the session reaches OPENED before provisioning happens).
@Test
public void
deviceNotProvisioned_exceptionThrownFromGetKeyRequest_provisioningDoneAndOpenSessionRetried() {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DefaultDrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(
DRM_SCHEME_UUID,
uuid ->
new FakeExoMediaDrm.Builder()
.setProvisionsRequired(1)
.throwNotProvisionedExceptionFromGetKeyRequest()
.build())
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
// openSession succeeded; the exception will come from getKeyRequest instead.
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED);
waitForOpenedWithKeys(drmSession);
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
assertThat(drmSession.queryKeyStatus())
.containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(1);
}
// A device needing two rounds of provisioning must still end up opened with
// keys, with exactly two provision requests sent.
@Test
public void deviceNotProvisioned_doubleProvisioningHandledAndOpenSessionRetried() {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
DefaultDrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(
DRM_SCHEME_UUID,
uuid -> new FakeExoMediaDrm.Builder().setProvisionsRequired(2).build())
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
// Confirm that opening the session threw NotProvisionedException (otherwise state would be
// OPENED)
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENING);
waitForOpenedWithKeys(drmSession);
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
assertThat(drmSession.queryKeyStatus())
.containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(2);
}
// The license server (not the ExoMediaDrm) signals that provisioning is
// required; the manager must provision and then complete the key load.
@Test
public void keyResponseIndicatesProvisioningRequired_provisioningDone() {
FakeExoMediaDrm.LicenseServer licenseServer =
FakeExoMediaDrm.LicenseServer.requiringProvisioningThenAllowingSchemeDatas(
DRM_SCHEME_DATAS);
DefaultDrmSessionManager drmSessionManager =
new DefaultDrmSessionManager.Builder()
.setUuidAndExoMediaDrmProvider(
DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm.Builder().build())
.build(/* mediaDrmCallback= */ licenseServer);
drmSessionManager.prepare();
drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
DrmSession drmSession =
checkNotNull(
drmSessionManager.acquireSession(
/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
// The device itself is provisioned, so the session opens normally first.
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED);
waitForOpenedWithKeys(drmSession);
assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
assertThat(drmSession.queryKeyStatus())
.containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(1);
}
  @Test
  public void provisioningUndoneWhileManagerIsActive_deviceReprovisioned() {
    FakeExoMediaDrm.LicenseServer licenseServer =
        FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
    // Each provisioning cycle needs two provision requests before it succeeds.
    FakeExoMediaDrm mediaDrm = new FakeExoMediaDrm.Builder().setProvisionsRequired(2).build();
    DefaultDrmSessionManager drmSessionManager =
        new DefaultDrmSessionManager.Builder()
            .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(mediaDrm))
            // Disable keepalive so release() below fully tears the session down.
            .setSessionKeepaliveMs(C.TIME_UNSET)
            .build(/* mediaDrmCallback= */ licenseServer);
    drmSessionManager.prepare();
    drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
    DrmSession drmSession =
        checkNotNull(
            drmSessionManager.acquireSession(
                /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
    // Confirm that opening the session threw NotProvisionedException (otherwise state would be
    // OPENED)
    assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENING);
    waitForOpenedWithKeys(drmSession);
    drmSession.release(/* eventDispatcher= */ null);
    // Undo the provisioning while the manager is still prepared, then acquire again.
    mediaDrm.resetProvisioning();
    drmSession =
        checkNotNull(
            drmSessionManager.acquireSession(
                /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
    // Confirm that opening the session threw NotProvisionedException (otherwise state would be
    // OPENED)
    assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENING);
    waitForOpenedWithKeys(drmSession);
    // Two provision requests per cycle, two cycles -> four requests in total.
    assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(4);
  }
  @Test
  public void managerNotPrepared_acquireSessionAndPreacquireSessionFail() throws Exception {
    FakeExoMediaDrm.LicenseServer licenseServer =
        FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
    DefaultDrmSessionManager drmSessionManager =
        new DefaultDrmSessionManager.Builder()
            .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
            .build(/* mediaDrmCallback= */ licenseServer);
    // prepare() is deliberately never called, so both acquisition paths must reject.
    assertThrows(
        Exception.class,
        () ->
            drmSessionManager.acquireSession(
                /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
    assertThrows(
        Exception.class,
        () ->
            drmSessionManager.preacquireSession(
                /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  }
  @Test
  public void managerReleasing_acquireSessionAndPreacquireSessionFail() throws Exception {
    FakeExoMediaDrm.LicenseServer licenseServer =
        FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
    DefaultDrmSessionManager drmSessionManager =
        new DefaultDrmSessionManager.Builder()
            .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
            .build(/* mediaDrmCallback= */ licenseServer);
    drmSessionManager.prepare();
    drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
    // Keep one session alive so that release() below leaves the manager in a 'releasing' state
    // rather than fully released.
    DrmSession drmSession =
        checkNotNull(
            drmSessionManager.acquireSession(
                /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
    drmSessionManager.release();
    // The manager's prepareCount is now zero, but the drmSession is keeping it in a 'releasing'
    // state. acquireSession and preacquireSession should still fail.
    assertThrows(
        Exception.class,
        () ->
            drmSessionManager.acquireSession(
                /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
    assertThrows(
        Exception.class,
        () ->
            drmSessionManager.preacquireSession(
                /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
    // Releasing the last session lets the manager finish tearing down.
    drmSession.release(/* eventDispatcher= */ null);
  }
  /**
   * Idles the main Looper until {@code drmSession} reaches {@link
   * DrmSession#STATE_OPENED_WITH_KEYS}, failing fast if the session reports an error or leaves
   * the OPENING/OPENED states.
   */
  private static void waitForOpenedWithKeys(DrmSession drmSession) {
    while (drmSession.getState() != DrmSession.STATE_OPENED_WITH_KEYS) {
      // Check the error first, so we get a meaningful failure if there's been an error.
      assertThat(drmSession.getError()).isNull();
      assertThat(drmSession.getState()).isAnyOf(DrmSession.STATE_OPENING, DrmSession.STATE_OPENED);
      // Allow the key response to be handled.
      ShadowLooper.idleMainLooper();
    }
  }
}
| |
// SECTION-START[License Header]
// <editor-fold defaultstate="collapsed" desc=" Generated License ">
/*
* Java Object Management and Configuration
 * Copyright (C) Christian Schulte, 2005-2006
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* $JOMC$
*
*/
// </editor-fold>
// SECTION-END
package org.jomc.standalone.ri.naming.support;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.persistence.EntityManagerFactory;
import javax.persistence.PersistenceException;
import javax.persistence.spi.ClassTransformer;
import javax.persistence.spi.PersistenceProvider;
import javax.persistence.spi.PersistenceUnitInfo;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
// SECTION-START[Documentation]
// <editor-fold defaultstate="collapsed" desc=" Generated Documentation ">
/**
* Base JPA context factory.
*
* <dl>
* <dt><b>Identifier:</b></dt><dd>org.jomc.standalone.ri.naming.support.AbstractJPAContextFactory</dd>
* <dt><b>Name:</b></dt><dd>JOMC Standalone RI AbstractJPAContextFactory</dd>
* <dt><b>Specifications:</b></dt>
* <dd>javax.naming.spi.InitialContextFactory</dd>
* <dt><b>Abstract:</b></dt><dd>Yes</dd>
* <dt><b>Final:</b></dt><dd>No</dd>
* <dt><b>Stateless:</b></dt><dd>No</dd>
* </dl>
*
* @author <a href="mailto:schulte2005@users.sourceforge.net">Christian Schulte</a> 1.0
* @version 1.0-beta-3-SNAPSHOT
*/
// </editor-fold>
// SECTION-END
// SECTION-START[Annotations]
// <editor-fold defaultstate="collapsed" desc=" Generated Annotations ">
@javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
// </editor-fold>
// SECTION-END
public abstract class AbstractJPAContextFactory extends AbstractContextFactory
{
    // SECTION-START[InitialContextFactory]

    /**
     * {@inheritDoc}
     * <p>This method creates an {@code EntityManagerFactory} using the {@code PersistenceUnitInfo} corresponding to the
     * environment of the factory and uses that factory to create an {@code EntityManager} binding both objects to the
     * context of the factory.</p>
     *
     * @param environment The possibly {@code null} environment specifying information to be used in the creation of the
     * initial context.
     *
     * @return {@code null}.
     *
     * @throws NamingException if binding the {@code EntityManager} or {@code EntityManagerFactory} fails.
     */
    public final Context getInitialContext( final Hashtable<?, ?> environment ) throws NamingException
    {
        final EntityManagerFactory entityManagerFactory =
            this.getPersistenceProvider().createContainerEntityManagerFactory(
            this.getPersistenceUnitInfo(), environment );

        this.getStandaloneContext().bind( this.getStandaloneEnvironment().getEntityManagerFactoryJndiName(),
                                          entityManagerFactory );

        this.getStandaloneContext().bind( this.getStandaloneEnvironment().getEntityManagerJndiName(),
                                          entityManagerFactory.createEntityManager() );

        return null;
    }

    // SECTION-END
    // SECTION-START[AbstractJPAContextFactory]

    /** JPA namespace URI. */
    private static final String PERSISTENCE_NS = "http://java.sun.com/xml/ns/persistence";

    /** The {@code PersistenceUnitInfo} corresponding to the factories environment. Lazily created. */
    private PersistenceUnitInfo persistenceUnitInfo;

    /**
     * Gets the {@code PersistenceProvider} backing the factory.
     *
     * @return The {@code PersistenceProvider} backing the factory.
     */
    protected abstract PersistenceProvider getPersistenceProvider();

    /**
     * Gets the {@code PersistenceUnitInfo} corresponding to the factories environment.
     * <p>The returned unit is named {@code jomc-standalone}, uses JTA transactions, resolves its data source from the
     * standalone JNDI context and discovers managed classes and mapping files by scanning all
     * {@code META-INF/persistence.xml} resources visible to the unit's class loader.</p>
     *
     * @return The {@code PersistenceUnitInfo} corresponding to the factories environment.
     */
    protected PersistenceUnitInfo getPersistenceUnitInfo()
    {
        if ( this.persistenceUnitInfo == null )
        {
            this.persistenceUnitInfo = new PersistenceUnitInfo()
            {

                private List<ClassTransformer> transformers;

                private List<String> managedClasses;

                private List<String> mappingFileNames;

                private ClassLoader classLoader;

                public String getPersistenceUnitName()
                {
                    return "jomc-standalone";
                }

                public String getPersistenceProviderClassName()
                {
                    return getPersistenceProvider().getClass().getName();
                }

                public PersistenceUnitTransactionType getTransactionType()
                {
                    return PersistenceUnitTransactionType.JTA;
                }

                public DataSource getJtaDataSource()
                {
                    try
                    {
                        return (DataSource) getStandaloneContext().lookup(
                            getStandaloneEnvironment().getJtaDataSourceJndiName() );

                    }
                    catch ( final NamingException e )
                    {
                        // PersistenceUnitInfo methods cannot throw checked exceptions.
                        throw new PersistenceException( getMessage( e ), e );
                    }
                }

                public DataSource getNonJtaDataSource()
                {
                    return null;
                }

                public List<String> getMappingFileNames()
                {
                    try
                    {
                        if ( this.mappingFileNames == null )
                        {
                            this.mappingFileNames = this.getPersistenceUnitElements( "mapping-file" );
                        }

                        return this.mappingFileNames;
                    }
                    catch ( final SAXException e )
                    {
                        throw new PersistenceException( getMessage( e ), e );
                    }
                    catch ( final IOException e )
                    {
                        throw new PersistenceException( getMessage( e ), e );
                    }
                    catch ( final ParserConfigurationException e )
                    {
                        throw new PersistenceException( getMessage( e ), e );
                    }
                }

                public List<URL> getJarFileUrls()
                {
                    try
                    {
                        final List<URL> jarFileUrls = new LinkedList<URL>();

                        for ( final Enumeration<URL> unitUrls = this.getClassLoader().getResources(
                            "META-INF/persistence.xml" ); unitUrls.hasMoreElements(); )
                        {
                            final URL unitUrl = unitUrls.nextElement();
                            final String externalForm = unitUrl.toExternalForm();
                            // Strip the trailing 'META-INF/persistence.xml' part to get the unit root URL.
                            final String jarUrl = externalForm.substring( 0, externalForm.indexOf( "META-INF" ) );
                            jarFileUrls.add( new URL( jarUrl ) );
                        }

                        return jarFileUrls;
                    }
                    catch ( final IOException e )
                    {
                        throw new PersistenceException( getMessage( e ), e );
                    }
                }

                public URL getPersistenceUnitRootUrl()
                {
                    return getStandaloneEnvironment().getJpaRootUrl();
                }

                public List<String> getManagedClassNames()
                {
                    try
                    {
                        if ( this.managedClasses == null )
                        {
                            this.managedClasses = this.getPersistenceUnitElements( "class" );
                        }

                        return this.managedClasses;
                    }
                    catch ( final SAXException e )
                    {
                        throw new PersistenceException( getMessage( e ), e );
                    }
                    catch ( final IOException e )
                    {
                        throw new PersistenceException( getMessage( e ), e );
                    }
                    catch ( final ParserConfigurationException e )
                    {
                        throw new PersistenceException( getMessage( e ), e );
                    }
                }

                public boolean excludeUnlistedClasses()
                {
                    return true;
                }

                public Properties getProperties()
                {
                    return getStandaloneEnvironment().getProperties();
                }

                public ClassLoader getClassLoader()
                {
                    if ( this.classLoader == null )
                    {
                        this.classLoader = new URLClassLoader( new URL[]
                            {
                                getStandaloneEnvironment().getJpaRootUrl()
                            }, this.getClass().getClassLoader() );

                    }

                    return this.classLoader;
                }

                public void addTransformer( final ClassTransformer transformer )
                {
                    if ( this.transformers == null )
                    {
                        this.transformers = new LinkedList<ClassTransformer>();
                    }

                    this.transformers.add( transformer );
                }

                public ClassLoader getNewTempClassLoader()
                {
                    final List<URL> jarFileUrls = this.getJarFileUrls();
                    jarFileUrls.add( getStandaloneEnvironment().getJpaRootUrl() );

                    return new URLClassLoader( jarFileUrls.toArray( new URL[ jarFileUrls.size() ] ),
                                               this.getClass().getClassLoader() );

                }

                /**
                 * Collects the text content of all {@code name} elements of all persistence units found in any
                 * {@code META-INF/persistence.xml} resource visible to the temporary class loader.
                 *
                 * @param name The local name of the persistence unit child elements to collect.
                 *
                 * @return The list of collected element values.
                 *
                 * @throws ParserConfigurationException if creating a document builder fails.
                 * @throws IOException if reading a resource fails.
                 * @throws SAXException if parsing a resource fails.
                 */
                private List<String> getPersistenceUnitElements( final String name )
                    throws ParserConfigurationException, IOException, SAXException
                {
                    final List<String> elements = new LinkedList<String>();
                    final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                    factory.setNamespaceAware( true );
                    factory.setValidating( false );
                    final DocumentBuilder documentBuilder = factory.newDocumentBuilder();

                    for ( final Enumeration<URL> e = this.getNewTempClassLoader().getResources(
                        "META-INF/persistence.xml" ); e.hasMoreElements(); )
                    {
                        final URL url = e.nextElement();
                        final InputStream in = url.openStream();
                        final Document doc;

                        try
                        {
                            doc = documentBuilder.parse( in );
                        }
                        finally
                        {
                            // Close the stream even when parsing fails to avoid leaking file handles.
                            in.close();
                        }

                        final NodeList persistenceUnits =
                            doc.getElementsByTagNameNS( PERSISTENCE_NS, "persistence-unit" );

                        for ( int i = persistenceUnits.getLength() - 1; i >= 0; i-- )
                        {
                            final Element persistenceUnit = (Element) persistenceUnits.item( i );
                            final NodeList nodeList = persistenceUnit.getElementsByTagNameNS( PERSISTENCE_NS, name );

                            for ( int j = nodeList.getLength() - 1; j >= 0; j-- )
                            {
                                final Element element = (Element) nodeList.item( j );
                                elements.add( element.getFirstChild().getNodeValue() );
                            }
                        }
                    }

                    return elements;
                }

            };
        }

        return this.persistenceUnitInfo;
    }

    /**
     * Gets the deepest available message of a throwable chain.
     *
     * @param t The throwable to get a message from; may be {@code null}.
     *
     * @return The message of {@code t}, the message of its nearest cause having one, or {@code null}.
     */
    private static String getMessage( final Throwable t )
    {
        return t != null ? t.getMessage() != null ? t.getMessage() : getMessage( t.getCause() ) : null;
    }

    // SECTION-END
    // SECTION-START[Constructors]
    // <editor-fold defaultstate="collapsed" desc=" Generated Constructors ">

    /** Creates a new {@code AbstractJPAContextFactory} instance. */
    @javax.annotation.Generated( value = "org.jomc.tools.SourceFileProcessor 1.2", comments = "See http://jomc.sourceforge.net/jomc/1.2/jomc-tools-1.2" )
    public AbstractJPAContextFactory()
    {
        // SECTION-START[Default Constructor]
        super();
        // SECTION-END
    }
    // </editor-fold>
    // SECTION-END
    // SECTION-START[Dependencies]
    // SECTION-END
    // SECTION-START[Properties]
    // SECTION-END
    // SECTION-START[Messages]
    // SECTION-END
}
| |
/**
* <copyright>
* </copyright>
*
*/
package cruise.umple.umple.impl;
import cruise.umple.umple.AssociationEnd_;
import cruise.umple.umple.IsSorted_;
import cruise.umple.umple.Multiplicity_;
import cruise.umple.umple.UmplePackage;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Association End </b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link cruise.umple.umple.impl.AssociationEnd_Impl#getMultiplicity_1 <em>Multiplicity 1</em>}</li>
* <li>{@link cruise.umple.umple.impl.AssociationEnd_Impl#getType_1 <em>Type 1</em>}</li>
* <li>{@link cruise.umple.umple.impl.AssociationEnd_Impl#getRoleName_1 <em>Role Name 1</em>}</li>
* <li>{@link cruise.umple.umple.impl.AssociationEnd_Impl#getIsSorted_1 <em>Is Sorted 1</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class AssociationEnd_Impl extends MinimalEObjectImpl.Container implements AssociationEnd_
{
  /**
   * The cached value of the '{@link #getMultiplicity_1() <em>Multiplicity 1</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * Lazily created by {@link #getMultiplicity_1()}; {@code null} until first accessed.
   * <!-- end-user-doc -->
   * @see #getMultiplicity_1()
   * @generated
   * @ordered
   */
  protected EList<Multiplicity_> multiplicity_1;

  /**
   * The default value of the '{@link #getType_1() <em>Type 1</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getType_1()
   * @generated
   * @ordered
   */
  protected static final String TYPE_1_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getType_1() <em>Type 1</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getType_1()
   * @generated
   * @ordered
   */
  protected String type_1 = TYPE_1_EDEFAULT;

  /**
   * The default value of the '{@link #getRoleName_1() <em>Role Name 1</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getRoleName_1()
   * @generated
   * @ordered
   */
  protected static final String ROLE_NAME_1_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getRoleName_1() <em>Role Name 1</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getRoleName_1()
   * @generated
   * @ordered
   */
  protected String roleName_1 = ROLE_NAME_1_EDEFAULT;

  /**
   * The cached value of the '{@link #getIsSorted_1() <em>Is Sorted 1</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * Lazily created by {@link #getIsSorted_1()}; {@code null} until first accessed.
   * <!-- end-user-doc -->
   * @see #getIsSorted_1()
   * @generated
   * @ordered
   */
  protected EList<IsSorted_> isSorted_1;

  /**
   * <!-- begin-user-doc -->
   * Instances are created via the generated factory, hence the protected constructor.
   * <!-- end-user-doc -->
   * @generated
   */
  protected AssociationEnd_Impl()
  {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * Returns the static EClass describing this model object.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return UmplePackage.eINSTANCE.getAssociationEnd_();
  }

  /**
   * <!-- begin-user-doc -->
   * Returns the containment list of multiplicities, creating it on first access.
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<Multiplicity_> getMultiplicity_1()
  {
    if (multiplicity_1 == null)
    {
      multiplicity_1 = new EObjectContainmentEList<Multiplicity_>(Multiplicity_.class, this, UmplePackage.ASSOCIATION_END___MULTIPLICITY_1);
    }
    return multiplicity_1;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getType_1()
  {
    return type_1;
  }

  /**
   * <!-- begin-user-doc -->
   * Sets the attribute and notifies adapters of the change when required.
   * <!-- end-user-doc -->
   * @generated
   */
  public void setType_1(String newType_1)
  {
    String oldType_1 = type_1;
    type_1 = newType_1;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, UmplePackage.ASSOCIATION_END___TYPE_1, oldType_1, type_1));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getRoleName_1()
  {
    return roleName_1;
  }

  /**
   * <!-- begin-user-doc -->
   * Sets the attribute and notifies adapters of the change when required.
   * <!-- end-user-doc -->
   * @generated
   */
  public void setRoleName_1(String newRoleName_1)
  {
    String oldRoleName_1 = roleName_1;
    roleName_1 = newRoleName_1;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, UmplePackage.ASSOCIATION_END___ROLE_NAME_1, oldRoleName_1, roleName_1));
  }

  /**
   * <!-- begin-user-doc -->
   * Returns the containment list of sort markers, creating it on first access.
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<IsSorted_> getIsSorted_1()
  {
    if (isSorted_1 == null)
    {
      isSorted_1 = new EObjectContainmentEList<IsSorted_>(IsSorted_.class, this, UmplePackage.ASSOCIATION_END___IS_SORTED_1);
    }
    return isSorted_1;
  }

  /**
   * <!-- begin-user-doc -->
   * Removes a contained object from the appropriate containment list.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case UmplePackage.ASSOCIATION_END___MULTIPLICITY_1:
        return ((InternalEList<?>)getMultiplicity_1()).basicRemove(otherEnd, msgs);
      case UmplePackage.ASSOCIATION_END___IS_SORTED_1:
        return ((InternalEList<?>)getIsSorted_1()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective feature accessor used by the EMF framework.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case UmplePackage.ASSOCIATION_END___MULTIPLICITY_1:
        return getMultiplicity_1();
      case UmplePackage.ASSOCIATION_END___TYPE_1:
        return getType_1();
      case UmplePackage.ASSOCIATION_END___ROLE_NAME_1:
        return getRoleName_1();
      case UmplePackage.ASSOCIATION_END___IS_SORTED_1:
        return getIsSorted_1();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective feature mutator; list features are cleared and re-populated.
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case UmplePackage.ASSOCIATION_END___MULTIPLICITY_1:
        getMultiplicity_1().clear();
        getMultiplicity_1().addAll((Collection<? extends Multiplicity_>)newValue);
        return;
      case UmplePackage.ASSOCIATION_END___TYPE_1:
        setType_1((String)newValue);
        return;
      case UmplePackage.ASSOCIATION_END___ROLE_NAME_1:
        setRoleName_1((String)newValue);
        return;
      case UmplePackage.ASSOCIATION_END___IS_SORTED_1:
        getIsSorted_1().clear();
        getIsSorted_1().addAll((Collection<? extends IsSorted_>)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * Restores a feature to its default: lists are cleared, attributes reset.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case UmplePackage.ASSOCIATION_END___MULTIPLICITY_1:
        getMultiplicity_1().clear();
        return;
      case UmplePackage.ASSOCIATION_END___TYPE_1:
        setType_1(TYPE_1_EDEFAULT);
        return;
      case UmplePackage.ASSOCIATION_END___ROLE_NAME_1:
        setRoleName_1(ROLE_NAME_1_EDEFAULT);
        return;
      case UmplePackage.ASSOCIATION_END___IS_SORTED_1:
        getIsSorted_1().clear();
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * Reports whether a feature differs from its default value.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case UmplePackage.ASSOCIATION_END___MULTIPLICITY_1:
        return multiplicity_1 != null && !multiplicity_1.isEmpty();
      case UmplePackage.ASSOCIATION_END___TYPE_1:
        return TYPE_1_EDEFAULT == null ? type_1 != null : !TYPE_1_EDEFAULT.equals(type_1);
      case UmplePackage.ASSOCIATION_END___ROLE_NAME_1:
        return ROLE_NAME_1_EDEFAULT == null ? roleName_1 != null : !ROLE_NAME_1_EDEFAULT.equals(roleName_1);
      case UmplePackage.ASSOCIATION_END___IS_SORTED_1:
        return isSorted_1 != null && !isSorted_1.isEmpty();
    }
    return super.eIsSet(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * Includes the attribute values; proxies fall back to the super implementation.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();

    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (type_1: ");
    result.append(type_1);
    result.append(", roleName_1: ");
    result.append(roleName_1);
    result.append(')');
    return result.toString();
  }

} //AssociationEnd_Impl
| |
/*
* This code comes from H2 database project and was modified for MapDB a bit.
* Re-licensed under Apache 2 license with Thomas Mueller permission
*
* Copyright (c) 2004-2011 H2 Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Original H2 license
*
* Copyright 2004-2011 H2 Group. Multiple-Licensed under the H2 License,
* Version 1.0, and under the Eclipse Public License, Version 1.0
* (http://h2database.com/html/license.html).
*
* This code is based on the LZF algorithm from Marc Lehmann. It is a
* re-implementation of the C code:
* http://cvs.schmorp.de/liblzf/lzf_c.c?view=markup
* http://cvs.schmorp.de/liblzf/lzf_d.c?view=markup
*
* According to a mail from Marc Lehmann, it's OK to use his algorithm:
* Date: 2010-07-15 15:57
* Subject: Re: Question about LZF licensing
* ...
* The algorithm is not copyrighted (and cannot be copyrighted afaik) - as long
* as you wrote everything yourself, without copying my code, that's just fine
* (looking is of course fine too).
* ...
*
* Still I would like to keep his copyright info:
*
* Copyright (c) 2000-2005 Marc Alexander Lehmann <schmorp@schmorp.de>
* Copyright (c) 2005 Oren J. Maurice <oymaurice@hazorea.org.il>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.mapdb;
import java.io.DataInput;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* <p>
* This class implements the LZF lossless data compression algorithm. LZF is a
* Lempel-Ziv variant with byte-aligned output, and optimized for speed.
* </p>
* <p>
* Safety/Use Notes:
* </p>
* <ul>
* <li>Each instance should be used by a single thread only.</li>
* <li>The data buffers should be smaller than 1 GB.</li>
* <li>For performance reasons, safety checks on expansion are omitted.</li>
* <li>Invalid compressed data can cause an ArrayIndexOutOfBoundsException.</li>
* </ul>
* <p>
* The LZF compressed format knows literal runs and back-references:
* </p>
* <ul>
* <li>Literal run: directly copy bytes from input to output.</li>
* <li>Back-reference: copy previous data to output stream, with specified
* offset from location and length. The length is at least 3 bytes.</li>
* </ul>
*<p>
* The first byte of the compressed stream is the control byte. For literal
 * runs, the highest three bits of the control byte are not set, then the lower
* bits are the literal run length, and the next bytes are data to copy directly
* into the output. For back-references, the highest three bits of the control
* byte are the back-reference length. If all three bits are set, then the
* back-reference length is stored in the next byte. The lower bits of the
* control byte combined with the next byte form the offset for the
* back-reference.
* </p>
*/
public final class CompressLZF{
/**
* The number of entries in the hash table. The size is a trade-off between
* hash collisions (reduced compression) and speed (amount that fits in CPU
* cache).
*/
private static final int HASH_SIZE = 1 << 14;
/**
* The maximum number of literals in a chunk (32).
*/
private static final int MAX_LITERAL = 1 << 5;
/**
* The maximum offset allowed for a back-reference (8192).
*/
private static final int MAX_OFF = 1 << 13;
/**
* The maximum back-reference length (264).
*/
private static final int MAX_REF = (1 << 8) + (1 << 3);
/**
* Hash table for matching byte sequences (reused for performance).
*/
private int[] cachedHashTable;
/**
* Return byte with lower 2 bytes being byte at index, then index+1.
*/
private static int first(byte[] in, int inPos) {
return (in[inPos] << 8) | (in[inPos + 1] & 255);
}
/**
* Shift v 1 byte left, add value at index inPos+2.
*/
private static int next(int v, byte[] in, int inPos) {
return (v << 8) | (in[inPos + 2] & 255);
}
/**
* Compute the address in the hash table.
*/
private static int hash(int h) {
return ((h * 2777) >> 9) & (HASH_SIZE - 1);
}
    /**
     * Compress {@code inLen} bytes of {@code in} into {@code out}, starting to
     * write at {@code outPos}.
     *
     * <p>Not thread-safe: the hash table is cached on the instance and reused
     * across calls.</p>
     *
     * @param in the uncompressed input data
     * @param inLen the number of input bytes to compress
     * @param out the output buffer (caller-sized; no bounds checks are done here)
     * @param outPos the first write position in {@code out}
     * @return the position in {@code out} one past the last byte written
     */
    public int compress(byte[] in, int inLen, byte[] out, int outPos) {
        int inPos = 0;
        if (cachedHashTable == null) {
            cachedHashTable = new int[HASH_SIZE];
        }
        int[] hashTab = cachedHashTable;
        int literals = 0;
        // Reserve one byte for the control byte of the first literal run.
        outPos++;
        int future = first(in, 0);
        while (inPos < inLen - 4) {
            byte p2 = in[inPos + 2];
            // next
            future = (future << 8) + (p2 & 255);
            int off = hash(future);
            int ref = hashTab[off];
            hashTab[off] = inPos;
            // if (ref < inPos
            //         && ref > 0
            //         && (off = inPos - ref - 1) < MAX_OFF
            //         && in[ref + 2] == p2
            //         && (((in[ref] & 255) << 8) | (in[ref + 1] & 255)) ==
            //             ((future >> 8) & 0xffff)) {
            // NOTE: 'off' is reused below as the back-reference offset.
            if (ref < inPos
                        && ref > 0
                        && (off = inPos - ref - 1) < MAX_OFF
                        && in[ref + 2] == p2
                        && in[ref + 1] == (byte) (future >> 8)
                        && in[ref] == (byte) (future >> 16)) {
                // match
                int maxLen = inLen - inPos - 2;
                if (maxLen > MAX_REF) {
                    maxLen = MAX_REF;
                }
                if (literals == 0) {
                    // multiple back-references,
                    // so there is no literal run control byte
                    outPos--;
                } else {
                    // set the control byte at the start of the literal run
                    // to store the number of literals
                    out[outPos - literals - 1] = (byte) (literals - 1);
                    literals = 0;
                }
                // Extend the match beyond the guaranteed 3 bytes.
                int len = 3;
                while (len < maxLen && in[ref + len] == in[inPos + len]) {
                    len++;
                }
                len -= 2;
                // Short match: length fits in the top 3 control bits; long
                // match: 7 marks an extra length byte.
                if (len < 7) {
                    out[outPos++] = (byte) ((off >> 8) + (len << 5));
                } else {
                    out[outPos++] = (byte) ((off >> 8) + (7 << 5));
                    out[outPos++] = (byte) (len - 7);
                }
                out[outPos++] = (byte) off;
                // move one byte forward to allow for a literal run control byte
                outPos++;
                inPos += len;
                // rebuild the future, and store the last bytes to the hashtable.
                // Storing hashes of the last bytes in back-reference improves
                // the compression ratio and only reduces speed slightly.
                future = first(in, inPos);
                future = next(future, in, inPos);
                hashTab[hash(future)] = inPos++;
                future = next(future, in, inPos);
                hashTab[hash(future)] = inPos++;
            } else {
                // copy one byte from input to output as part of literal
                out[outPos++] = in[inPos++];
                literals++;
                // at the end of this literal chunk, write the length
                // to the control byte and start a new chunk
                if (literals == MAX_LITERAL) {
                    out[outPos - literals - 1] = (byte) (literals - 1);
                    literals = 0;
                    // move ahead one byte to allow for the
                    // literal run control byte
                    outPos++;
                }
            }
        }
        // write the remaining few bytes as literals
        while (inPos < inLen) {
            out[outPos++] = in[inPos++];
            literals++;
            if (literals == MAX_LITERAL) {
                out[outPos - literals - 1] = (byte) (literals - 1);
                literals = 0;
                outPos++;
            }
        }
        // writes the final literal run length to the control byte
        out[outPos - literals - 1] = (byte) (literals - 1);
        if (literals == 0) {
            // The reserved control byte was never needed; give it back.
            outPos--;
        }
        return outPos;
    }
    /**
     * Expand LZF-compressed data read from {@code in} into {@code out},
     * writing from {@code outPos} until {@code outPos} reaches {@code outLen}.
     *
     * @param in the source of compressed bytes
     * @param out the output buffer
     * @param outPos the first write position in {@code out}
     * @param outLen the absolute end position in {@code out} (decompression
     *        stops once {@code outPos} reaches it)
     * @throws IOException if reading from {@code in} fails
     */
    public void expand(DataInput in, byte[] out, int outPos, int outLen) throws IOException {
        // if ((inPos | outPos | outLen) < 0) {
        if(CC.PARANOID && ! (outLen>=0))
            throw new AssertionError();
        do {
            int ctrl = in.readByte() & 255;
            if (ctrl < MAX_LITERAL) {
                // literal run of length = ctrl + 1,
                ctrl++;
                // copy to output and move forward this many bytes
                in.readFully(out,outPos,ctrl);
                outPos += ctrl;
            } else {
                // back reference
                // the highest 3 bits are the match length
                int len = ctrl >> 5;
                // if the length is maxed, add the next byte to the length
                if (len == 7) {
                    len += in.readByte() & 255;
                }
                // minimum back-reference is 3 bytes,
                // so 2 was subtracted before storing size
                len += 2;
                // ctrl is now the offset for a back-reference...
                // the logical AND operation removes the length bits
                ctrl = -((ctrl & 0x1f) << 8) - 1;
                // the next byte augments/increases the offset
                ctrl -= in.readByte() & 255;
                // copy the back-reference bytes from the given
                // location in output to current position
                ctrl += outPos;
                // NOTE(review): this rejects a copy ending exactly at
                // out.length too — callers appear to size 'out' with slack;
                // confirm before tightening to '>'.
                if (outPos + len >= out.length) {
                    // reduce array bounds checking
                    throw new ArrayIndexOutOfBoundsException();
                }
                // Byte-by-byte copy is required: source and destination may overlap.
                for (int i = 0; i < len; i++) {
                    out[outPos++] = out[ctrl++];
                }
            }
        } while (outPos < outLen);
    }
    /**
     * Expand LZF-compressed data from the {@code ByteBuffer} {@code in}
     * (starting at absolute position {@code inPos}) into {@code out}, writing
     * from {@code outPos} until {@code outPos} reaches {@code outLen}.
     *
     * <p>The buffer's own position is never moved; a duplicate is used for
     * bulk reads so concurrent absolute reads of {@code in} stay valid.</p>
     *
     * @param in the buffer holding compressed bytes
     * @param inPos the absolute read position of the first compressed byte
     * @param out the output buffer
     * @param outPos the first write position in {@code out}
     * @param outLen the absolute end position in {@code out}
     */
    public void expand(ByteBuffer in, int inPos, byte[] out, int outPos, int outLen) {
        // Lazily created duplicate used only for bulk literal copies.
        ByteBuffer in2=null;
        if(CC.PARANOID && ! (outLen>=0))
            throw new AssertionError();
        do {
            int ctrl = in.get(inPos++) & 255;
            if (ctrl < MAX_LITERAL) {
                // literal run of length = ctrl + 1,
                ctrl++;
                // copy to output and move forward this many bytes
                //System.arraycopy(in, inPos, out, outPos, ctrl);
                if(in2==null) in2 = in.duplicate();
                in2.position(inPos);
                in2.get(out,outPos,ctrl);
                outPos += ctrl;
                inPos += ctrl;
            } else {
                // back reference
                // the highest 3 bits are the match length
                int len = ctrl >> 5;
                // if the length is maxed, add the next byte to the length
                if (len == 7) {
                    len += in.get(inPos++) & 255;
                }
                // minimum back-reference is 3 bytes,
                // so 2 was subtracted before storing size
                len += 2;
                // ctrl is now the offset for a back-reference...
                // the logical AND operation removes the length bits
                ctrl = -((ctrl & 0x1f) << 8) - 1;
                // the next byte augments/increases the offset
                ctrl -= in.get(inPos++) & 255;
                // copy the back-reference bytes from the given
                // location in output to current position
                ctrl += outPos;
                if (outPos + len >= out.length) {
                    // reduce array bounds checking
                    throw new ArrayIndexOutOfBoundsException();
                }
                // Byte-by-byte copy is required: source and destination may overlap.
                for (int i = 0; i < len; i++) {
                    out[outPos++] = out[ctrl++];
                }
            }
        } while (outPos < outLen);
    }
/**
 * Decompresses LZF-encoded bytes from {@code in} (starting at {@code inPos})
 * into {@code out} (starting at {@code outPos}) until {@code outLen} output
 * bytes have been produced.
 *
 * @throws IllegalArgumentException if any position/length argument is negative
 * @throws ArrayIndexOutOfBoundsException if a run would overflow {@code out}
 */
public void expand(byte[] in, int inPos, byte[] out, int outPos, int outLen) {
    if (inPos < 0 || outPos < 0 || outLen < 0) {
        throw new IllegalArgumentException();
    }
    do {
        int token = in[inPos++] & 255;
        if (token < MAX_LITERAL) {
            // a literal run of (token + 1) raw bytes: bulk-copy them through
            int runLen = token + 1;
            System.arraycopy(in, inPos, out, outPos, runLen);
            outPos += runLen;
            inPos += runLen;
        } else {
            // back-reference: top 3 bits hold the biased match length
            int matchLen = token >> 5;
            if (matchLen == 7) {
                // length field saturated; the next byte extends it
                matchLen += in[inPos++] & 255;
            }
            // stored length was biased by -2 (minimum match is 3 bytes)
            matchLen += 2;
            // source index: low 5 bits of the token are the high offset bits,
            // the following byte the low bits; offsets are 1-based backwards
            int ref = outPos - ((token & 0x1f) << 8) - (in[inPos++] & 255) - 1;
            if (outPos + matchLen >= out.length) {
                // reduce array bounds checking
                throw new ArrayIndexOutOfBoundsException();
            }
            // must copy byte-by-byte: the match may overlap the bytes
            // being written (repeating pattern)
            while (matchLen-- > 0) {
                out[outPos++] = out[ref++];
            }
        }
    } while (outPos < outLen);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.index;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.logging.Logger;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.index.IndexWriter; // javadocs
import org.apache.lucene.store.*;
import org.apache.lucene.util.ReaderUtil;
import org.apache.lucene.util.Version;
/** Sort an index by document importance factor. Higher scoring documents are
* assigned smaller document numbers. Document weights are obtained from a
* specified field, which has to be single-valued and stored, with string value
* that represents a float number. Stored fields in the output index remain
* consistent, i.e. both stored fields and postings are renumbered in sync.
*
* <p><b>NOTE</b>: this tool is unaware of documents added
* atomically via {@link IndexWriter#addDocuments} or {@link
* IndexWriter#updateDocuments}, which means it can easily
* break up such document groups.
*/
public class IndexSorter {
  private static final Logger LOG = Logger.getLogger(IndexSorter.class.getName());

  /**
   * Associates one posting with its remapped document id and the offset of
   * its buffered freq/positions data in the temporary RAM file.
   */
  private static class PostingMap implements Comparable<PostingMap> {
    private int newDoc;
    private long offset;

    // order by remapped doc id; doc ids are non-negative so the
    // subtraction cannot overflow
    public int compareTo(PostingMap pm) {
      return this.newDoc - pm.newDoc;
    }
  }

  /**
   * A {@link TermPositions} view that replays the postings of the wrapped
   * enumerator in remapped (new) document order. For each term the original
   * postings are buffered (tf + delta-encoded positions) into a RAMDirectory
   * file, sorted by new doc id, then served back from that buffer.
   */
  private static class SortedTermPositions implements TermPositions {
    private TermPositions original;
    private int[] oldToNew;

    private int docFreq;
    private PostingMap[] postingMaps = new PostingMap[0];
    private int pointer;

    private int freq;
    private int position;

    private static final String TEMP_FILE = "temp";
    private final RAMDirectory tempDir = new RAMDirectory();
    private RAMOutputStream out;
    private IndexInput in;

    public SortedTermPositions(TermPositions original, int[] oldToNew) {
      this.original = original;
      this.oldToNew = oldToNew;
      try {
        out = (RAMOutputStream)tempDir.createOutput(TEMP_FILE);
      } catch (IOException ioe) {
        LOG.warning("Error creating temporary output: " + ioe);
      }
    }

    public void seek(Term term) throws IOException {
      throw new UnsupportedOperationException();
    }

    /**
     * Positions this enumerator on the term of {@code terms}, buffering and
     * re-sorting all of its postings by the remapped document ids.
     */
    public void seek(TermEnum terms) throws IOException {
      original.seek(terms);
      docFreq = terms.docFreq();
      pointer = -1;
      if (docFreq > postingMaps.length) {               // grow postingsMap
        PostingMap[] newMap = new PostingMap[docFreq];
        System.arraycopy(postingMaps, 0, newMap, 0, postingMaps.length);
        for (int i = postingMaps.length; i < docFreq; i++) {
          newMap[i] = new PostingMap();
        }
        postingMaps = newMap;
      }

      out.reset();
      int i = 0;
      while (original.next()) {
        PostingMap map = postingMaps[i++];
        map.newDoc = oldToNew[original.doc()];          // remap the newDoc id
        map.offset = out.getFilePointer();              // save pointer to buffer

        final int tf = original.freq();                 // buffer tf & positions
        out.writeVInt(tf);
        int prevPosition = 0;
        for (int j = tf; j > 0; j--) {                  // delta encode positions
          int p = original.nextPosition();
          out.writeVInt(p - prevPosition);
          prevPosition = p;
        }
      }
      out.flush();
      docFreq = i;                                      // allow for deletions

      Arrays.sort(postingMaps, 0, docFreq);             // resort by mapped doc ids
      // NOTE: this might be substantially faster if RAMInputStream were public
      // and supported a reset() operation.
      in = tempDir.openInput(TEMP_FILE);
    }

    public boolean next() throws IOException {
      pointer++;
      if (pointer < docFreq) {
        in.seek(postingMaps[pointer].offset);
        freq = in.readVInt();
        position = 0;
        return true;
      }
      return false;
    }

    public int doc() { return postingMaps[pointer].newDoc; }
    public int freq() { return freq; }

    public int nextPosition() throws IOException {
      // positions were delta-encoded in seek(); decode incrementally
      int positionIncrement = in.readVInt();
      position += positionIncrement;
      return position;
    }

    public int read(int[] docs, int[] freqs) {
      throw new UnsupportedOperationException();
    }

    public boolean skipTo(int target) {
      throw new UnsupportedOperationException();
    }

    // payloads are not preserved by this tool
    public byte[] getPayload(byte[] data, int offset) throws IOException {
      return null;
    }

    public int getPayloadLength() {
      return 0;
    }

    public boolean isPayloadAvailable() {
      return false;
    }

    public void close() throws IOException {
      original.close();
    }
  }

  /**
   * A filter reader that presents the wrapped index in the remapped document
   * order: stored fields, norms, term vectors and postings are all renumbered
   * through the oldToNew / newToOld maps.
   */
  private static class SortingReader extends FilterIndexReader {

    private int[] oldToNew;
    private int[] newToOld;

    public SortingReader(IndexReader oldReader, int[] oldToNew) {
      super(oldReader);
      this.oldToNew = oldToNew;

      // build the inverse map; -1 entries (if any) mark dropped documents
      this.newToOld = new int[oldReader.maxDoc()];
      int oldDoc = 0;
      while (oldDoc < oldToNew.length) {
        int newDoc = oldToNew[oldDoc];
        if (newDoc != -1) {
          newToOld[newDoc] = oldDoc;
        }
        oldDoc++;
      }
    }

    @Override
    public IndexReader[] getSequentialSubReaders() {
      // present as an atomic reader so addIndexes consumes the remapped view
      return null;
    }

    @Override
    public FieldInfos getFieldInfos() {
      return ReaderUtil.getMergedFieldInfos(in);
    }

    @Override
    public Document document(int n, FieldSelector fieldSelector)
        throws CorruptIndexException, IOException {
      return super.document(newToOld[n], fieldSelector);
    }

    @Override
    public boolean isDeleted(int n) {
      // deletions are squeezed out during the rewrite
      return false;
    }

    @Override
    public byte[] norms(String f) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
    public void norms(String f, byte[] norms, int offset) throws IOException {
      // copy norms into the caller's array, renumbered to new doc ids
      byte[] oldNorms = super.norms(f);
      int oldDoc = 0;
      while (oldDoc < oldNorms.length) {
        int newDoc = oldToNew[oldDoc];
        if (newDoc != -1) {
          norms[newDoc] = oldNorms[oldDoc];
        }
        oldDoc++;
      }
    }

    @Override
    protected void doSetNorm(int d, String f, byte b) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
    public TermDocs termDocs() throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
    public TermPositions termPositions() throws IOException {
      return new SortedTermPositions(super.termPositions(), oldToNew);
    }

    @Override
    public TermFreqVector[] getTermFreqVectors(int docNumber)
        throws IOException {
      return super.getTermFreqVectors(newToOld[docNumber]);
    }

    @Override
    protected void doDelete(int n) throws IOException {
      throw new UnsupportedOperationException();
    }
  }

  /** Pairs an original document id with its importance score. */
  private static class DocScore implements Comparable<DocScore> {
    private int oldDoc;
    private float score;

    // descending by score; ties broken ascending by original doc id
    // (doc ids are non-negative, so the subtraction is safe)
    public int compareTo(DocScore that) {
      if (this.score == that.score) {
        return this.oldDoc - that.oldDoc;
      } else {
        return this.score < that.score ? 1 : -1 ;
      }
    }

    @Override
    public String toString() {
      return "oldDoc=" + oldDoc + ",score=" + score;
    }
  }

  public IndexSorter() {
  }

  /**
   * Rewrites the index in {@code input} into {@code output} with documents
   * renumbered in descending order of the float value stored in {@code field}.
   *
   * @param input  directory holding the source index (opened read-only)
   * @param output directory to receive the sorted index
   * @param field  single-valued stored field whose string value parses as a float
   * @throws IOException on any index access failure
   */
  public void sort(Directory input, Directory output, String field) throws IOException {
    LOG.info("IndexSorter: starting.");
    long start = System.currentTimeMillis();
    IndexReader reader = IndexReader.open(input, true);
    try {
      SortingReader sorter = new SortingReader(reader, oldToNew(reader, field));
      IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_31, new WhitespaceAnalyzer(Version.LUCENE_31));
      IndexWriter writer = new IndexWriter(output, cfg);
      try {
        writer.addIndexes(sorter);
      } finally {
        // previously neither writer (on failure) nor reader (ever) was
        // closed, leaking file handles and write locks
        writer.close();
      }
    } finally {
      reader.close();
    }
    long end = System.currentTimeMillis();
    LOG.info("IndexSorter: done, " + (end - start)
        + " total milliseconds");
  }

  /**
   * Builds the old-to-new document id map: documents are ranked by the float
   * value of {@code field}, highest score first. Deleted documents and
   * unparseable values score 0.
   */
  private static int[] oldToNew(IndexReader reader, String field) throws IOException {
    int readerMax = reader.maxDoc();
    DocScore[] newToOld = new DocScore[readerMax];

    FieldSelector fSel = new MapFieldSelector(field);
    for (int oldDoc = 0; oldDoc < readerMax; oldDoc++) {
      float score;
      if (reader.isDeleted(oldDoc)) {
        score = 0.0f;
      } else {
        Document d = reader.document(oldDoc, fSel);
        try {
          score = Float.parseFloat(d.get(field));
        } catch (Exception e) {
          // missing or malformed weight field: sink to the bottom
          score = 0.0f;
        }
      }
      DocScore docScore = new DocScore();
      docScore.oldDoc = oldDoc;
      docScore.score = score;
      newToOld[oldDoc] = docScore;
    }
    Arrays.sort(newToOld);

    int[] oldToNew = new int[readerMax];
    for (int newDoc = 0; newDoc < readerMax; newDoc++) {
      DocScore docScore = newToOld[newDoc];
      oldToNew[docScore.oldDoc] = newDoc;
    }
    return oldToNew;
  }

  /** Command-line entry point: IndexSorter &lt;input&gt; &lt;output&gt; &lt;field&gt; */
  public static void main(String[] args) throws Exception {
    Directory input, output;
    String field;
    String usage = "IndexSorter <input> <output> <field>";

    if (args.length < 3) {
      System.err.println("Usage: " + usage);
      System.exit(-1);
    }

    input = FSDirectory.open(new File(args[0]));
    File out = new File(args[1]);
    if (!out.exists()) out.mkdirs();
    output = FSDirectory.open(out);
    field = args[2];

    IndexSorter sorter = new IndexSorter();
    try {
      sorter.sort(input, output, field);
    } catch (Exception e) {
      LOG.warning("IndexSorter: " + e);
    }
  }
}
| |
/*
* Copyright 2013 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit.tickets;
import java.io.IOException;
import java.text.MessageFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gitblit.IStoredSettings;
import com.gitblit.Keys;
import com.gitblit.extensions.TicketHook;
import com.gitblit.manager.INotificationManager;
import com.gitblit.manager.IPluginManager;
import com.gitblit.manager.IRepositoryManager;
import com.gitblit.manager.IRuntimeManager;
import com.gitblit.manager.IUserManager;
import com.gitblit.models.RepositoryModel;
import com.gitblit.models.TicketModel;
import com.gitblit.models.TicketModel.Attachment;
import com.gitblit.models.TicketModel.Change;
import com.gitblit.models.TicketModel.Field;
import com.gitblit.models.TicketModel.Patchset;
import com.gitblit.models.TicketModel.Status;
import com.gitblit.tickets.TicketIndexer.Lucene;
import com.gitblit.utils.DeepCopier;
import com.gitblit.utils.DiffUtils;
import com.gitblit.utils.DiffUtils.DiffStat;
import com.gitblit.utils.StringUtils;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
/**
* Abstract parent class of a ticket service that stubs out required methods
* and transparently handles Lucene indexing.
*
* @author James Moger
*
*/
public abstract class ITicketService {
// settings key: recompute patchset diffstats when loading cached tickets
public static final String SETTING_UPDATE_DIFFSTATS = "migration.updateDiffstats";

// git-config section/key names used to persist labels and milestones
private static final String LABEL = "label";
private static final String MILESTONE = "milestone";
private static final String STATUS = "status";
private static final String COLOR = "color";
private static final String DUE = "due";

// format for the milestone due-date value stored in git config
private static final String DUE_DATE_PATTERN = "yyyy-MM-dd";

/**
 * Object filter interface to querying against all available ticket models.
 */
public interface TicketFilter {

    boolean accept(TicketModel ticket);
}

protected final Logger log;

protected final IStoredSettings settings;

protected final IRuntimeManager runtimeManager;

protected final INotificationManager notificationManager;

protected final IUserManager userManager;

protected final IRepositoryManager repositoryManager;

protected final IPluginManager pluginManager;

// Lucene index over all tickets; used for queries and hasTickets checks
protected final TicketIndexer indexer;

// LRU/expiring cache of fully-built ticket models keyed by repo + id
private final Cache<TicketKey, TicketModel> ticketsCache;

// per-repository caches of the label/milestone lists read from git config
private final Map<String, List<TicketLabel>> labelsCache;

private final Map<String, List<TicketMilestone>> milestonesCache;

// snapshot of SETTING_UPDATE_DIFFSTATS taken at construction
private final boolean updateDiffstats;
/**
 * Immutable cache key that identifies a ticket by repository name and
 * ticket id. Used as the key of {@code ticketsCache}.
 */
private static class TicketKey {
    final String repository;
    final long ticketId;

    TicketKey(RepositoryModel repository, long ticketId) {
        this.repository = repository.name;
        this.ticketId = ticketId;
    }

    @Override
    public int hashCode() {
        return (repository + ticketId).hashCode();
    }

    @Override
    public boolean equals(Object o) {
        if (o instanceof TicketKey) {
            // compare the actual fields; the previous implementation
            // compared hashCode() values, which made any two keys with
            // colliding hashes "equal" and could serve the wrong ticket
            // from the cache
            TicketKey that = (TicketKey) o;
            return ticketId == that.ticketId
                    && repository.equals(that.repository);
        }
        return false;
    }

    @Override
    public String toString() {
        return repository + ":" + ticketId;
    }
}
/**
 * Creates a ticket service.
 *
 * Wires up the manager dependencies, creates the Lucene ticket indexer,
 * and initializes the ticket/label/milestone caches.
 */
public ITicketService(
        IRuntimeManager runtimeManager,
        IPluginManager pluginManager,
        INotificationManager notificationManager,
        IUserManager userManager,
        IRepositoryManager repositoryManager) {

    this.log = LoggerFactory.getLogger(getClass());
    this.settings = runtimeManager.getSettings();
    this.runtimeManager = runtimeManager;
    this.pluginManager = pluginManager;
    this.notificationManager = notificationManager;
    this.userManager = userManager;
    this.repositoryManager = repositoryManager;

    this.indexer = new TicketIndexer(runtimeManager);

    // bounded, time-expiring cache of built ticket models
    CacheBuilder<Object, Object> cb = CacheBuilder.newBuilder();
    this.ticketsCache = cb
            .maximumSize(1000)
            .expireAfterAccess(30, TimeUnit.MINUTES)
            .build();

    this.labelsCache = new ConcurrentHashMap<String, List<TicketLabel>>();
    this.milestonesCache = new ConcurrentHashMap<String, List<TicketMilestone>>();

    this.updateDiffstats = settings.getBoolean(SETTING_UPDATE_DIFFSTATS, true);
}
/**
 * Start the service.
 *
 * @return this service, for chaining
 * @since 1.4.0
 */
public abstract ITicketService start();

/**
 * Stop the service. Closes the indexer, drops cached tickets, closes all
 * repositories, and finally delegates to the subclass {@code close()}.
 *
 * @return this service, for chaining
 * @since 1.4.0
 */
public final ITicketService stop() {
    indexer.close();
    ticketsCache.invalidateAll();
    repositoryManager.closeAll();
    close();
    return this;
}
/**
 * Creates a ticket notifier. The ticket notifier is not thread-safe!
 * A fresh instance is built per call; callers must not share it
 * across threads.
 *
 * @return a new notifier
 * @since 1.4.0
 */
public TicketNotifier createNotifier() {
    return new TicketNotifier(
            runtimeManager,
            notificationManager,
            userManager,
            repositoryManager,
            this);
}
/**
 * Returns the ready status of the ticket service. Subclasses may
 * override to report a real readiness state; the default is always ready.
 *
 * @return true if the ticket service is ready
 * @since 1.4.0
 */
public boolean isReady() {
    return true;
}
/**
 * Returns true if the new patchsets can be accepted for this repository.
 *
 * @param repository
 * @return true if patchsets are being accepted
 * @since 1.4.0
 */
public boolean isAcceptingNewPatchsets(RepositoryModel repository) {
    // service must be up and the global setting enabled
    if (!isReady()) {
        return false;
    }
    if (!settings.getBoolean(Keys.tickets.acceptNewPatchsets, true)) {
        return false;
    }
    // the repository itself must opt in and be generally updatable
    return repository.acceptNewPatchsets && isAcceptingTicketUpdates(repository);
}
/**
 * Returns true if new tickets can be manually created for this repository.
 * This is separate from accepting patchsets.
 *
 * @param repository
 * @return true if tickets are being accepted
 * @since 1.4.0
 */
public boolean isAcceptingNewTickets(RepositoryModel repository) {
    // service must be up and the global setting enabled
    if (!isReady()) {
        return false;
    }
    if (!settings.getBoolean(Keys.tickets.acceptNewTickets, true)) {
        return false;
    }
    // the repository itself must opt in and be generally updatable
    return repository.acceptNewTickets && isAcceptingTicketUpdates(repository);
}
/**
 * Returns true if ticket updates are allowed for this repository.
 *
 * @param repository
 * @return true if tickets are allowed to be updated
 * @since 1.4.0
 */
public boolean isAcceptingTicketUpdates(RepositoryModel repository) {
    if (!isReady()) {
        return false;
    }
    // updatable: a bare repository with at least one commit...
    boolean updatable = repository.hasCommits && repository.isBare;
    // ...that is neither frozen nor a mirror
    boolean readOnly = repository.isFrozen || repository.isMirror;
    return updatable && !readOnly;
}
/**
 * Returns true if the repository has any tickets, as reported by the
 * Lucene index.
 *
 * @param repository
 * @return true if the repository has tickets
 * @since 1.4.0
 */
public boolean hasTickets(RepositoryModel repository) {
    return indexer.hasTickets(repository);
}
/**
 * Closes any open resources used by this service.
 * @since 1.4.0
 */
protected abstract void close();

/**
 * Reset all caches in the service: tickets, labels, milestones, and any
 * subclass-specific caches.
 * @since 1.4.0
 */
public final synchronized void resetCaches() {
    ticketsCache.invalidateAll();
    labelsCache.clear();
    milestonesCache.clear();
    resetCachesImpl();
}

/**
 * Reset all caches in the service (subclass hook).
 * @since 1.4.0
 */
protected abstract void resetCachesImpl();
/**
 * Reset any caches for the repository in the service: evicts the
 * repository's tickets from the ticket cache, drops its label and
 * milestone lists, and invokes the subclass hook.
 * @since 1.4.0
 */
public final synchronized void resetCaches(RepositoryModel repository) {
    // collect the cache keys that belong to this repository, then
    // evict them in one call
    List<TicketKey> staleKeys = new ArrayList<TicketKey>();
    for (TicketKey candidate : ticketsCache.asMap().keySet()) {
        if (candidate.repository.equals(repository.name)) {
            staleKeys.add(candidate);
        }
    }
    ticketsCache.invalidateAll(staleKeys);

    labelsCache.remove(repository.name);
    milestonesCache.remove(repository.name);

    resetCachesImpl(repository);
}
/**
 * Reset the caches for the specified repository (subclass hook).
 *
 * @param repository
 * @since 1.4.0
 */
protected abstract void resetCachesImpl(RepositoryModel repository);
/**
 * Returns the list of labels for the repository. Labels are read from the
 * repository's git config and cached (as an unmodifiable list) until the
 * cache is reset.
 *
 * @param repository
 * @return the list of labels
 * @since 1.4.0
 */
public List<TicketLabel> getLabels(RepositoryModel repository) {
    String key = repository.name;
    if (labelsCache.containsKey(key)) {
        return labelsCache.get(key);
    }
    List<TicketLabel> list = new ArrayList<TicketLabel>();
    Repository db = null;
    try {
        db = repositoryManager.getRepository(repository.name);
        StoredConfig config = db.getConfig();
        Set<String> names = config.getSubsections(LABEL);
        for (String name : names) {
            TicketLabel label = new TicketLabel(name);
            label.color = config.getString(LABEL, name, COLOR);
            list.add(label);
        }
        labelsCache.put(key, Collections.unmodifiableList(list));
    } catch (Exception e) {
        log.error("invalid tickets settings for " + repository, e);
    } finally {
        // guard the close: getRepository may return null for a missing
        // repository and the unguarded close() previously threw an NPE
        // from the finally block (siblings already use this pattern)
        if (db != null) {
            db.close();
        }
    }
    return list;
}
/**
 * Returns a TicketLabel object for a given label. If the label is not
 * found, a ticket label object is created. When found, the label's
 * ticket list is populated from the Lucene index.
 *
 * @param repository
 * @param label
 * @return a TicketLabel
 * @since 1.4.0
 */
public TicketLabel getLabel(RepositoryModel repository, String label) {
    for (TicketLabel tl : getLabels(repository)) {
        // label comparison is case-insensitive
        if (tl.name.equalsIgnoreCase(label)) {
            String q = QueryBuilder.q(Lucene.rid.matches(repository.getRID())).and(Lucene.labels.matches(label)).build();
            tl.tickets = indexer.queryFor(q, 1, 0, Lucene.number.name(), true);
            return tl;
        }
    }
    // unknown label: return a detached instance with no tickets
    return new TicketLabel(label);
}
/**
 * Creates a label and persists it to the repository's git config.
 *
 * @param repository
 * @param label
 * @param createdBy
 * @return the label
 * @since 1.4.0
 */
public synchronized TicketLabel createLabel(RepositoryModel repository, String label, String createdBy) {
    // BUG FIX: this previously instantiated a TicketMilestone, so the
    // returned object was a milestone masquerading as a label
    TicketLabel lb = new TicketLabel(label);
    Repository db = null;
    try {
        db = repositoryManager.getRepository(repository.name);
        StoredConfig config = db.getConfig();
        config.setString(LABEL, label, COLOR, lb.color);
        config.save();
        // drop the cached (unmodifiable) label list so the new label is
        // visible immediately, mirroring createMilestone
        labelsCache.remove(repository.name);
    } catch (IOException e) {
        log.error("failed to create label " + label + " in " + repository, e);
    } finally {
        if (db != null) {
            db.close();
        }
    }
    return lb;
}
/**
 * Updates a label's persisted attributes (currently its color).
 *
 * @param repository
 * @param label
 * @param createdBy
 * @return true if the update was successful
 * @since 1.4.0
 */
public synchronized boolean updateLabel(RepositoryModel repository, TicketLabel label, String createdBy) {
    Repository db = null;
    try {
        db = repositoryManager.getRepository(repository.name);
        StoredConfig config = db.getConfig();
        config.setString(LABEL, label.name, COLOR, label.color);
        config.save();
        // invalidate the cached label list so the change is visible
        // immediately, mirroring updateMilestone
        labelsCache.remove(repository.name);
        return true;
    } catch (IOException e) {
        log.error("failed to update label " + label + " in " + repository, e);
    } finally {
        if (db != null) {
            db.close();
        }
    }
    return false;
}
/**
 * Renames a label and relabels every ticket that referenced the old name.
 *
 * @param repository
 * @param oldName
 * @param newName
 * @param createdBy
 * @return true if the rename was successful
 * @since 1.4.0
 */
public synchronized boolean renameLabel(RepositoryModel repository, String oldName, String newName, String createdBy) {
    if (StringUtils.isEmpty(newName)) {
        throw new IllegalArgumentException("new label can not be empty!");
    }
    Repository db = null;
    try {
        db = repositoryManager.getRepository(repository.name);
        TicketLabel label = getLabel(repository, oldName);
        StoredConfig config = db.getConfig();
        config.unsetSection(LABEL, oldName);
        config.setString(LABEL, newName, COLOR, label.color);
        config.save();
        // invalidate the cached label list so the rename is visible
        // immediately, mirroring renameMilestone
        labelsCache.remove(repository.name);
        // migrate every ticket carrying the old label to the new name
        for (QueryResult qr : label.tickets) {
            Change change = new Change(createdBy);
            change.unlabel(oldName);
            change.label(newName);
            updateTicket(repository, qr.number, change);
        }
        return true;
    } catch (IOException e) {
        log.error("failed to rename label " + oldName + " in " + repository, e);
    } finally {
        if (db != null) {
            db.close();
        }
    }
    return false;
}
/**
 * Deletes a label from the repository's git config. Tickets carrying the
 * label are not modified.
 *
 * @param repository
 * @param label
 * @param createdBy
 * @return true if the delete was successful
 * @since 1.4.0
 */
public synchronized boolean deleteLabel(RepositoryModel repository, String label, String createdBy) {
    if (StringUtils.isEmpty(label)) {
        throw new IllegalArgumentException("label can not be empty!");
    }
    Repository db = null;
    try {
        db = repositoryManager.getRepository(repository.name);
        StoredConfig config = db.getConfig();
        config.unsetSection(LABEL, label);
        config.save();
        // invalidate the cached label list so the deletion is visible
        // immediately, mirroring deleteMilestone
        labelsCache.remove(repository.name);
        return true;
    } catch (IOException e) {
        log.error("failed to delete label " + label + " in " + repository, e);
    } finally {
        if (db != null) {
            db.close();
        }
    }
    return false;
}
/**
 * Returns the list of milestones for the repository. Milestones are read
 * from the repository's git config and cached (as an unmodifiable list)
 * until the cache is reset.
 *
 * @param repository
 * @return the list of milestones
 * @since 1.4.0
 */
public List<TicketMilestone> getMilestones(RepositoryModel repository) {
    String key = repository.name;
    if (milestonesCache.containsKey(key)) {
        return milestonesCache.get(key);
    }
    List<TicketMilestone> list = new ArrayList<TicketMilestone>();
    Repository db = repositoryManager.getRepository(repository.name);
    try {
        StoredConfig config = db.getConfig();
        Set<String> names = config.getSubsections(MILESTONE);
        for (String name : names) {
            TicketMilestone milestone = new TicketMilestone(name);
            milestone.status = Status.fromObject(config.getString(MILESTONE, name, STATUS), milestone.status);
            milestone.color = config.getString(MILESTONE, name, COLOR);
            String due = config.getString(MILESTONE, name, DUE);
            if (!StringUtils.isEmpty(due)) {
                try {
                    milestone.due = new SimpleDateFormat(DUE_DATE_PATTERN).parse(due);
                } catch (ParseException e) {
                    log.error("failed to parse {} milestone {} due date \"{}\"",
                            new Object [] { repository, name, due });
                }
            }
            list.add(milestone);
        }
        milestonesCache.put(key, Collections.unmodifiableList(list));
    } catch (Exception e) {
        log.error("invalid tickets settings for " + repository, e);
    } finally {
        // guard the close: getRepository may return null for a missing
        // repository and the unguarded close() previously threw an NPE
        // from the finally block (siblings already use this pattern)
        if (db != null) {
            db.close();
        }
    }
    return list;
}
/**
 * Returns the list of milestones for the repository that match the status.
 *
 * @param repository
 * @param status
 * @return the list of milestones
 * @since 1.4.0
 */
public List<TicketMilestone> getMilestones(RepositoryModel repository, Status status) {
    // filter the full milestone list down to the requested status
    List<TicketMilestone> filtered = new ArrayList<TicketMilestone>();
    for (TicketMilestone candidate : getMilestones(repository)) {
        if (candidate.status == status) {
            filtered.add(candidate);
        }
    }
    return filtered;
}
/**
 * Returns the specified milestone or null if the milestone does not exist.
 * The returned object is a deep copy with its ticket list populated from
 * the Lucene index, so callers may mutate it freely.
 *
 * @param repository
 * @param milestone
 * @return the milestone or null if it does not exist
 * @since 1.4.0
 */
public TicketMilestone getMilestone(RepositoryModel repository, String milestone) {
    for (TicketMilestone ms : getMilestones(repository)) {
        // milestone comparison is case-insensitive
        if (ms.name.equalsIgnoreCase(milestone)) {
            // copy so the cached (shared) instance is never mutated
            TicketMilestone tm = DeepCopier.copy(ms);
            String q = QueryBuilder.q(Lucene.rid.matches(repository.getRID())).and(Lucene.milestone.matches(milestone)).build();
            tm.tickets = indexer.queryFor(q, 1, 0, Lucene.number.name(), true);
            return tm;
        }
    }
    return null;
}
/**
 * Creates a milestone and persists it to the repository's git config.
 *
 * @param repository
 * @param milestone
 * @param createdBy
 * @return the milestone
 * @since 1.4.0
 */
public synchronized TicketMilestone createMilestone(RepositoryModel repository, String milestone, String createdBy) {
    TicketMilestone ms = new TicketMilestone(milestone);
    Repository db = null;
    try {
        db = repositoryManager.getRepository(repository.name);
        StoredConfig config = db.getConfig();
        config.setString(MILESTONE, milestone, STATUS, ms.status.name());
        config.setString(MILESTONE, milestone, COLOR, ms.color);
        config.save();
        // drop the cached milestone list so the new milestone is
        // visible immediately
        milestonesCache.remove(repository.name);
    } catch (IOException e) {
        log.error("failed to create milestone " + milestone + " in " + repository, e);
    } finally {
        if (db != null) {
            db.close();
        }
    }
    return ms;
}
/**
 * Updates a milestone's persisted attributes (status, color, due date).
 *
 * @param repository
 * @param milestone
 * @param createdBy
 * @return true if successful
 * @since 1.4.0
 */
public synchronized boolean updateMilestone(RepositoryModel repository, TicketMilestone milestone, String createdBy) {
    Repository db = null;
    try {
        db = repositoryManager.getRepository(repository.name);
        StoredConfig config = db.getConfig();
        config.setString(MILESTONE, milestone.name, STATUS, milestone.status.name());
        config.setString(MILESTONE, milestone.name, COLOR, milestone.color);
        if (milestone.due != null) {
            config.setString(MILESTONE, milestone.name, DUE,
                    new SimpleDateFormat(DUE_DATE_PATTERN).format(milestone.due));
        } else {
            // BUG FIX: previously a cleared due date was never removed
            // from the config, so it could not be unset once stored
            config.unset(MILESTONE, milestone.name, DUE);
        }
        config.save();
        // drop the cached milestone list so the change is visible immediately
        milestonesCache.remove(repository.name);
        return true;
    } catch (IOException e) {
        log.error("failed to update milestone " + milestone + " in " + repository, e);
    } finally {
        if (db != null) {
            db.close();
        }
    }
    return false;
}
/**
 * Renames a milestone, notifying owners of open tickets.
 *
 * @param repository
 * @param oldName
 * @param newName
 * @param createdBy
 * @return true if successful
 * @since 1.4.0
 */
public synchronized boolean renameMilestone(RepositoryModel repository, String oldName, String newName, String createdBy) {
    // delegate with notifications enabled
    return renameMilestone(repository, oldName, newName, createdBy, true);
}
/**
 * Renames a milestone: rewrites the git config entry, re-targets every
 * ticket that referenced the old name, and optionally emails the
 * participants of the affected open tickets.
 *
 * @param repository
 * @param oldName
 * @param newName
 * @param createdBy
 * @param notifyOpenTickets
 * @return true if successful
 * @since 1.6.0
 */
public synchronized boolean renameMilestone(RepositoryModel repository, String oldName,
        String newName, String createdBy, boolean notifyOpenTickets) {
    if (StringUtils.isEmpty(newName)) {
        throw new IllegalArgumentException("new milestone can not be empty!");
    }
    Repository db = null;
    try {
        db = repositoryManager.getRepository(repository.name);
        TicketMilestone tm = getMilestone(repository, oldName);
        if (tm == null) {
            // nothing to rename
            return false;
        }
        // replace the old config section with one under the new name
        StoredConfig config = db.getConfig();
        config.unsetSection(MILESTONE, oldName);
        config.setString(MILESTONE, newName, STATUS, tm.status.name());
        config.setString(MILESTONE, newName, COLOR, tm.color);
        if (tm.due != null) {
            config.setString(MILESTONE, newName, DUE,
                    new SimpleDateFormat(DUE_DATE_PATTERN).format(tm.due));
        }
        config.save();
        milestonesCache.remove(repository.name);
        // move every ticket in the milestone to the new name; queue
        // mailings for open tickets if requested
        TicketNotifier notifier = createNotifier();
        for (QueryResult qr : tm.tickets) {
            Change change = new Change(createdBy);
            change.setField(Field.milestone, newName);
            TicketModel ticket = updateTicket(repository, qr.number, change);
            if (notifyOpenTickets && ticket.isOpen()) {
                notifier.queueMailing(ticket);
            }
        }
        if (notifyOpenTickets) {
            notifier.sendAll();
        }
        return true;
    } catch (IOException e) {
        log.error("failed to rename milestone " + oldName + " in " + repository, e);
    } finally {
        if (db != null) {
            db.close();
        }
    }
    return false;
}
/**
 * Deletes a milestone, notifying owners of open tickets.
 *
 * @param repository
 * @param milestone
 * @param createdBy
 * @return true if successful
 * @since 1.4.0
 */
public synchronized boolean deleteMilestone(RepositoryModel repository, String milestone, String createdBy) {
    // delegate with notifications enabled
    return deleteMilestone(repository, milestone, createdBy, true);
}
/**
 * Deletes a milestone: removes its git config section, clears the
 * milestone field on every ticket that referenced it, and optionally
 * emails the participants of the affected open tickets.
 *
 * @param repository
 * @param milestone
 * @param createdBy
 * @param notifyOpenTickets
 * @return true if successful
 * @since 1.6.0
 */
public synchronized boolean deleteMilestone(RepositoryModel repository, String milestone,
        String createdBy, boolean notifyOpenTickets) {
    if (StringUtils.isEmpty(milestone)) {
        throw new IllegalArgumentException("milestone can not be empty!");
    }
    Repository db = null;
    try {
        TicketMilestone tm = getMilestone(repository, milestone);
        if (tm == null) {
            // nothing to delete
            return false;
        }
        db = repositoryManager.getRepository(repository.name);
        StoredConfig config = db.getConfig();
        config.unsetSection(MILESTONE, milestone);
        config.save();
        milestonesCache.remove(repository.name);
        // detach every ticket from the deleted milestone; queue
        // mailings for open tickets if requested
        TicketNotifier notifier = createNotifier();
        for (QueryResult qr : tm.tickets) {
            Change change = new Change(createdBy);
            change.setField(Field.milestone, "");
            TicketModel ticket = updateTicket(repository, qr.number, change);
            if (notifyOpenTickets && ticket.isOpen()) {
                notifier.queueMailing(ticket);
            }
        }
        if (notifyOpenTickets) {
            notifier.sendAll();
        }
        return true;
    } catch (IOException e) {
        log.error("failed to delete milestone " + milestone + " in " + repository, e);
    } finally {
        if (db != null) {
            db.close();
        }
    }
    return false;
}
/**
 * Returns the set of assigned ticket ids in the repository.
 *
 * @param repository
 * @return a set of assigned ticket ids in the repository
 * @since 1.6.0
 */
public abstract Set<Long> getIds(RepositoryModel repository);

/**
 * Assigns a new ticket id.
 *
 * @param repository
 * @return a new ticket id
 * @since 1.4.0
 */
public abstract long assignNewId(RepositoryModel repository);

/**
 * Ensures that we have a ticket for this ticket id.
 *
 * @param repository
 * @param ticketId
 * @return true if the ticket exists
 * @since 1.4.0
 */
public abstract boolean hasTicket(RepositoryModel repository, long ticketId);
/**
 * Returns all tickets. This is not a Lucene search! See the filtered
 * overload for the cost caveats.
 *
 * @param repository
 * @return all tickets
 * @since 1.4.0
 */
public List<TicketModel> getTickets(RepositoryModel repository) {
    // null filter accepts every ticket
    return getTickets(repository, null);
}
/**
 * Returns all tickets that satisfy the filter. Retrieving tickets from the
 * service requires deserializing all journals and building ticket models.
 * This is an expensive process and not recommended. Instead, the queryFor
 * method should be used which executes against the Lucene index.
 *
 * @param repository the repository whose tickets are returned
 * @param filter
 *            optional issue filter to only return matching results
 * @return a list of tickets
 * @since 1.4.0
 */
public abstract List<TicketModel> getTickets(RepositoryModel repository, TicketFilter filter);
/**
 * Retrieves the ticket, serving it from the cache when possible. On a cache
 * miss the ticket is loaded, its diffstat is refreshed from the current
 * patchset (when enabled), and the result is cached.
 *
 * @param repository the repository that owns the ticket
 * @param ticketId the ticket id
 * @return a ticket, if it exists, otherwise null
 * @since 1.4.0
 */
public final TicketModel getTicket(RepositoryModel repository, long ticketId) {
    TicketKey key = new TicketKey(repository, ticketId);
    TicketModel ticket = ticketsCache.getIfPresent(key);
    // if ticket not cached
    if (ticket == null) {
        // load ticket from the persistence implementation
        ticket = getTicketImpl(repository, ticketId);
        // if ticket exists
        if (ticket != null) {
            if (ticket.hasPatchsets() && updateDiffstats) {
                Repository r = repositoryManager.getRepository(repository.name);
                // FIX: getRepository may return null (deleteMilestone already
                // null-checks the same call); the original code would NPE in
                // getDiffStat and again in the finally block
                if (r != null) {
                    try {
                        Patchset patchset = ticket.getCurrentPatchset();
                        DiffStat diffStat = DiffUtils.getDiffStat(r, patchset.base, patchset.tip);
                        // diffstat could be null if we have ticket data without the
                        // commit objects. e.g. ticket replication without repo
                        // mirroring
                        if (diffStat != null) {
                            ticket.insertions = diffStat.getInsertions();
                            ticket.deletions = diffStat.getDeletions();
                        }
                    } finally {
                        r.close();
                    }
                }
            }
            // cache ticket
            ticketsCache.put(key, ticket);
        }
    }
    return ticket;
}
/**
 * Retrieves the ticket from the persistence implementation, bypassing the
 * cache.
 *
 * @param repository the repository that owns the ticket
 * @param ticketId the ticket id
 * @return a ticket, if it exists, otherwise null
 * @since 1.4.0
 */
protected abstract TicketModel getTicketImpl(RepositoryModel repository, long ticketId);
/**
 * Returns the journal used to build a ticket.
 *
 * @param repository
 * @param ticketId
 * @return the journal for the ticket, if it exists, otherwise null
 * @since 1.6.0
 */
public final List<Change> getJournal(RepositoryModel repository, long ticketId) {
    if (!hasTicket(repository, ticketId)) {
        return null;
    }
    return getJournalImpl(repository, ticketId);
}
/**
 * Retrieves the ticket journal from the persistence implementation.
 *
 * @param repository the repository that owns the ticket
 * @param ticketId the ticket id
 * @return the ticket's change journal, if it exists, otherwise null
 * @since 1.6.0
 */
protected abstract List<Change> getJournalImpl(RepositoryModel repository, long ticketId);
/**
 * Get the ticket url built from the configured canonical web url.
 *
 * @param ticket
 * @return the ticket url
 * @since 1.4.0
 */
public String getTicketUrl(TicketModel ticket) {
    String root = settings.getString(Keys.web.canonicalUrl, "https://localhost:8443");
    return MessageFormat.format("{0}/tickets?r={1}&h={2,number,0}",
            root, ticket.repository, ticket.number);
}
/**
 * Get the compare url for two commits, built from the configured canonical
 * web url.
 *
 * @param ticket the ticket whose repository anchors the comparison
 * @param base
 * @param tip
 * @return the compare url
 * @since 1.4.0
 */
public String getCompareUrl(TicketModel ticket, String base, String tip) {
    String root = settings.getString(Keys.web.canonicalUrl, "https://localhost:8443");
    return MessageFormat.format("{0}/compare?r={1}&h={2}..{3}",
            root, ticket.repository, base, tip);
}
/**
 * Returns true if this ticket service implementation supports attachments.
 *
 * @return true if attachments are supported
 * @since 1.4.0
 */
public abstract boolean supportsAttachments();
/**
 * Retrieves the specified attachment from a ticket.
 *
 * @param repository the repository that owns the ticket
 * @param ticketId the ticket id
 * @param filename the attachment filename
 * @return an attachment, if found, null otherwise
 * @since 1.4.0
 */
public abstract Attachment getAttachment(RepositoryModel repository, long ticketId, String filename);
/**
 * Creates a ticket with an automatically assigned id. Your change must
 * include a repository, author & title, at a minimum. If your change does
 * not have those minimum requirements a RuntimeException will be thrown.
 *
 * @param repository
 * @param change
 * @return true if successful
 * @since 1.4.0
 */
public TicketModel createTicket(RepositoryModel repository, Change change) {
    final long autoAssignId = 0L;
    return createTicket(repository, autoAssignId, change);
}
/**
 * Creates a ticket. Your change must include a repository, author & title,
 * at a minimum. If your change does not have those minimum requirements a
 * RuntimeException will be thrown.
 *
 * @param repository
 * @param ticketId (if <=0 the ticket id will be assigned)
 * @param change
 * @return true if successful
 * @since 1.4.0
 */
public TicketModel createTicket(RepositoryModel repository, long ticketId, Change change) {
    // validate the minimum requirements up front
    if (repository == null) {
        throw new RuntimeException("Must specify a repository!");
    }
    if (StringUtils.isEmpty(change.author)) {
        throw new RuntimeException("Must specify a change author!");
    }
    if (!change.hasField(Field.title)) {
        throw new RuntimeException("Must specify a title!");
    }
    // the author automatically watches the new ticket
    change.watch(change.author);
    long id = ticketId > 0L ? ticketId : assignNewId(repository);
    change.setField(Field.status, Status.New);
    if (!commitChangeImpl(repository, id, change)) {
        return null;
    }
    TicketModel created = getTicket(repository, id);
    indexer.index(created);
    // call the ticket hooks
    if (pluginManager != null) {
        for (TicketHook hook : pluginManager.getExtensions(TicketHook.class)) {
            try {
                hook.onNewTicket(created);
            } catch (Exception e) {
                log.error("Failed to execute extension", e);
            }
        }
    }
    return created;
}
/**
 * Updates a ticket by committing a change to its journal. The cached model
 * is invalidated before the commit and refreshed afterwards.
 *
 * @param repository
 * @param ticketId
 * @param change
 * @return the ticket model if successful
 * @since 1.4.0
 */
public final TicketModel updateTicket(RepositoryModel repository, long ticketId, Change change) {
    if (change == null) {
        throw new RuntimeException("change can not be null!");
    }
    if (StringUtils.isEmpty(change.author)) {
        throw new RuntimeException("must specify a change author!");
    }
    TicketKey key = new TicketKey(repository, ticketId);
    // drop the stale model before committing the change
    ticketsCache.invalidate(key);
    if (!commitChangeImpl(repository, ticketId, change)) {
        return null;
    }
    TicketModel updated = getTicket(repository, ticketId);
    ticketsCache.put(key, updated);
    indexer.index(updated);
    // call the ticket hooks
    if (pluginManager != null) {
        for (TicketHook hook : pluginManager.getExtensions(TicketHook.class)) {
            try {
                hook.onUpdateTicket(updated, change);
            } catch (Exception e) {
                log.error("Failed to execute extension", e);
            }
        }
    }
    return updated;
}
/**
 * Deletes all tickets in every repository. The index and caches are only
 * reset when every repository deletion succeeded.
 *
 * @return true if successful
 * @since 1.4.0
 */
public boolean deleteAll() {
    List<String> repositories = repositoryManager.getRepositoryList();
    int deleted = 0;
    for (String name : repositories) {
        RepositoryModel repository = repositoryManager.getRepositoryModel(name);
        if (deleteAll(repository)) {
            deleted++;
        }
    }
    boolean success = deleted == repositories.size();
    if (success) {
        indexer.deleteAll();
        resetCaches();
    }
    return success;
}
/**
 * Deletes all tickets in the specified repository, resetting its caches and
 * index entries on success.
 *
 * @param repository
 * @return true if succesful
 * @since 1.4.0
 */
public boolean deleteAll(RepositoryModel repository) {
    if (!deleteAllImpl(repository)) {
        return false;
    }
    log.info("Deleted all tickets for {}", repository.name);
    resetCaches(repository);
    indexer.deleteAll(repository);
    return true;
}
/**
 * Delete all tickets for the specified repository in the persistence
 * implementation.
 *
 * @param repository the repository whose tickets are deleted
 * @return true if successful
 * @since 1.4.0
 */
protected abstract boolean deleteAllImpl(RepositoryModel repository);
/**
 * Handles repository renames: migrates the persisted tickets, drops the old
 * repository's caches and index entries, and reindexes under the new name.
 *
 * @param oldRepository
 * @param newRepository
 * @return true if successful
 * @since 1.4.0
 */
public boolean rename(RepositoryModel oldRepository, RepositoryModel newRepository) {
    boolean migrated = renameImpl(oldRepository, newRepository);
    if (!migrated) {
        return false;
    }
    resetCaches(oldRepository);
    indexer.deleteAll(oldRepository);
    reindex(newRepository);
    return true;
}
/**
 * Renames a repository in the persistence implementation.
 *
 * @param oldRepository the repository's previous model
 * @param newRepository the repository's new model
 * @return true if successful
 * @since 1.4.0
 */
protected abstract boolean renameImpl(RepositoryModel oldRepository, RepositoryModel newRepository);
/**
 * Deletes a ticket, removing it from the cache and the index.
 *
 * @param repository
 * @param ticketId
 * @param deletedBy the user performing the deletion
 * @return true if successful; false if the ticket does not exist or the
 *         deletion failed
 * @since 1.4.0
 */
public boolean deleteTicket(RepositoryModel repository, long ticketId, String deletedBy) {
    TicketModel ticket = getTicket(repository, ticketId);
    // FIX: getTicket returns null for an unknown id; the original would
    // NPE on ticket.title (and inside deleteTicketImpl)
    if (ticket == null) {
        return false;
    }
    boolean success = deleteTicketImpl(repository, ticket, deletedBy);
    if (success) {
        log.info(MessageFormat.format("Deleted {0} ticket #{1,number,0}: {2}",
                repository.name, ticketId, ticket.title));
        ticketsCache.invalidate(new TicketKey(repository, ticketId));
        indexer.delete(ticket);
        return true;
    }
    return false;
}
/**
 * Deletes a ticket in the persistence implementation.
 *
 * @param repository the repository that owns the ticket
 * @param ticket the ticket to delete
 * @param deletedBy the user performing the deletion
 * @return true if successful
 * @since 1.4.0
 */
protected abstract boolean deleteTicketImpl(RepositoryModel repository, TicketModel ticket, String deletedBy);
/**
 * Updates the text of an ticket comment.
 *
 * @param ticket
 * @param commentId
 *            the id of the comment to revise
 * @param updatedBy
 *            the author of the updated comment
 * @param comment
 *            the revised comment
 * @return the revised ticket if the change was successful
 * @since 1.4.0
 */
public final TicketModel updateComment(TicketModel ticket, String commentId,
        String updatedBy, String comment) {
    // a comment revision is just a change carrying the same comment id
    Change revision = new Change(updatedBy);
    revision.comment(comment);
    revision.comment.id = commentId;
    RepositoryModel repository = repositoryManager.getRepositoryModel(ticket.repository);
    return updateTicket(repository, ticket.number, revision);
}
/**
 * Deletes a comment from a ticket.
 *
 * @param ticket
 * @param commentId
 *            the id of the comment to delete
 * @param deletedBy
 *            the user deleting the comment
 * @return the revised ticket if the deletion was successful
 * @since 1.4.0
 */
public final TicketModel deleteComment(TicketModel ticket, String commentId, String deletedBy) {
    // deletion is recorded as an empty comment flagged deleted
    Change deletion = new Change(deletedBy);
    deletion.comment("");
    deletion.comment.id = commentId;
    deletion.comment.deleted = true;
    RepositoryModel repository = repositoryManager.getRepositoryModel(ticket.repository);
    return updateTicket(repository, ticket.number, deletion);
}
/**
 * Commit a ticket change to the repository's persisted journal.
 *
 * @param repository the repository that owns the ticket
 * @param ticketId the ticket id
 * @param change the change to commit
 * @return true, if the change was committed
 * @since 1.4.0
 */
protected abstract boolean commitChangeImpl(RepositoryModel repository, long ticketId, Change change);
/**
 * Searches for the specified text. This will use the indexer, if available,
 * or will fall back to brute-force retrieval of all tickets and string
 * matching.
 *
 * @param repository
 * @param text
 * @param page
 * @param pageSize
 * @return a list of matching tickets
 * @since 1.4.0
 */
public List<QueryResult> searchFor(RepositoryModel repository, String text, int page, int pageSize) {
    // delegate straight to the index
    List<QueryResult> matches = indexer.searchFor(repository, text, page, pageSize);
    return matches;
}
/**
 * Queries the index for the matching tickets.
 *
 * @param query
 * @param page
 * @param pageSize
 * @param sortBy
 * @param descending
 * @return a list of matching tickets or an empty list
 * @since 1.4.0
 */
public List<QueryResult> queryFor(String query, int page, int pageSize, String sortBy, boolean descending) {
    // delegate straight to the index
    List<QueryResult> results = indexer.queryFor(query, page, pageSize, sortBy, descending);
    return results;
}
/**
 * Destroys an existing index and reindexes all tickets.
 * This operation may be expensive and time-consuming.
 * @since 1.4.0
 */
public void reindex() {
    final long startedAt = System.nanoTime();
    indexer.deleteAll();
    for (String name : repositoryManager.getRepositoryList()) {
        RepositoryModel repository = repositoryManager.getRepositoryModel(name);
        try {
            List<TicketModel> tickets = getTickets(repository);
            if (tickets.isEmpty()) {
                continue;
            }
            log.info("reindexing {} tickets from {} ...", tickets.size(), repository);
            indexer.index(tickets);
            System.gc();
        } catch (Exception e) {
            log.error("failed to reindex {}", repository.name);
            log.error(null, e);
        }
    }
    long elapsedMsecs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startedAt);
    log.info("reindexing completed in {} msecs.", elapsedMsecs);
}
/**
 * Destroys any existing index and reindexes all tickets.
 * This operation may be expensive and time-consuming.
 * @since 1.4.0
 */
public void reindex(RepositoryModel repository) {
    final long startedAt = System.nanoTime();
    List<TicketModel> tickets = getTickets(repository);
    indexer.index(tickets);
    log.info("reindexing {} tickets from {} ...", tickets.size(), repository);
    long elapsedMsecs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startedAt);
    log.info("reindexing completed in {} msecs.", elapsedMsecs);
    resetCaches(repository);
}
/**
 * Synchronously executes the runnable. This is used for special processing
 * of ticket updates, namely merging from the web ui.
 * <p>
 * The method is synchronized, so at most one such runnable executes at a
 * time against this service instance.
 *
 * @param runnable the work to execute
 * @since 1.4.0
 */
public synchronized void exec(Runnable runnable) {
    runnable.run();
}
}
| |
package com.emuneee.marshmallowfm;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaMetadata;
import android.media.MediaPlayer;
import android.media.session.MediaController;
import android.media.session.MediaSession;
import android.media.session.PlaybackState;
import android.net.Uri;
import android.os.Binder;
import android.os.Bundle;
import android.os.IBinder;
import java.io.IOException;
/**
 * Background service that owns a {@link MediaPlayer}, exposes transport
 * control through a {@link MediaSession}, and posts a media-style
 * notification while playing.
 * <p>
 * NOTE(review): track metadata and the notification text are hard-coded to a
 * single sample episode ("Cubs The Favorites?: 10/14/15"); presumably they
 * should be derived from the played uri — confirm with callers.
 */
public class AudioPlayerService extends Service implements MediaPlayer.OnPreparedListener,
        MediaPlayer.OnBufferingUpdateListener, MediaPlayer.OnCompletionListener {

    public static final String SESSION_TAG = "mmFM";
    public static final String ACTION_PLAY = "play";
    public static final String ACTION_PAUSE = "pause";
    public static final String ACTION_FAST_FORWARD = "fastForward";
    public static final String ACTION_REWIND = "rewind";
    public static final String PARAM_TRACK_URI = "uri";

    private MediaSession mMediaSession;
    private MediaPlayer mMediaPlayer;
    private AudioManager mAudioManager;
    private PlaybackState mPlaybackState;
    private MediaController mMediaController;

    /** Binder handing bound clients a direct reference to this service. */
    public class ServiceBinder extends Binder {
        public AudioPlayerService getService() {
            return AudioPlayerService.this;
        }
    }

    private Binder mBinder = new ServiceBinder();

    private MediaSession.Callback mMediaSessionCallback = new MediaSession.Callback() {

        @Override
        public void onPlayFromSearch(String query, Bundle extras) {
            // the query string itself is unused; the track uri travels in extras
            Uri uri = extras.getParcelable(PARAM_TRACK_URI);
            onPlayFromUri(uri, null);
        }

        @Override
        public void onPlayFromUri(Uri uri, Bundle extras) {
            try {
                switch (mPlaybackState.getState()) {
                    case PlaybackState.STATE_PLAYING:
                    case PlaybackState.STATE_PAUSED:
                        // drop the current track before loading the new one
                        mMediaPlayer.reset();
                        prepareToPlay(uri);
                        break;
                    case PlaybackState.STATE_NONE:
                        prepareToPlay(uri);
                        break;
                }
            } catch (IOException e) {
                // FIX: the original swallowed this exception silently, leaving
                // the session in a stale state; publish an error state so
                // controllers can react
                mPlaybackState = new PlaybackState.Builder()
                        .setState(PlaybackState.STATE_ERROR, 0, 1.0f)
                        .build();
                mMediaSession.setPlaybackState(mPlaybackState);
            }
        }

        @Override
        public void onPlay() {
            // only resume from an explicit pause
            switch (mPlaybackState.getState()) {
                case PlaybackState.STATE_PAUSED:
                    mMediaPlayer.start();
                    mPlaybackState = new PlaybackState.Builder()
                            .setState(PlaybackState.STATE_PLAYING, 0, 1.0f)
                            .build();
                    mMediaSession.setPlaybackState(mPlaybackState);
                    updateNotification();
                    break;
            }
        }

        @Override
        public void onPause() {
            switch (mPlaybackState.getState()) {
                case PlaybackState.STATE_PLAYING:
                    mMediaPlayer.pause();
                    mPlaybackState = new PlaybackState.Builder()
                            .setState(PlaybackState.STATE_PAUSED, 0, 1.0f)
                            .build();
                    mMediaSession.setPlaybackState(mPlaybackState);
                    updateNotification();
                    break;
            }
        }

        @Override
        public void onRewind() {
            // jump back 10 seconds while playing
            switch (mPlaybackState.getState()) {
                case PlaybackState.STATE_PLAYING:
                    mMediaPlayer.seekTo(mMediaPlayer.getCurrentPosition() - 10000);
                    break;
            }
        }

        @Override
        public void onFastForward() {
            // jump ahead 10 seconds while playing
            switch (mPlaybackState.getState()) {
                case PlaybackState.STATE_PLAYING:
                    mMediaPlayer.seekTo(mMediaPlayer.getCurrentPosition() + 10000);
                    break;
            }
        }
    };

    public AudioPlayerService() {
    }

    /**
     * Loads the uri into the player, synchronously prepares it, and publishes
     * the CONNECTING state plus the (hard-coded) track metadata on the
     * session. Playback itself starts in {@link #onPrepared(MediaPlayer)}.
     * Extracted from onPlayFromUri, whose switch branches duplicated this
     * code verbatim.
     */
    private void prepareToPlay(Uri uri) throws IOException {
        mMediaPlayer.setDataSource(this, uri);
        mMediaPlayer.prepare();
        mPlaybackState = new PlaybackState.Builder()
                .setState(PlaybackState.STATE_CONNECTING, 0, 1.0f)
                .build();
        mMediaSession.setPlaybackState(mPlaybackState);
        mMediaSession.setMetadata(new MediaMetadata.Builder()
                .putString(MediaMetadata.METADATA_KEY_ARTIST, "ESPN: PTI")
                .putString(MediaMetadata.METADATA_KEY_AUTHOR, "ESPN: PTI")
                .putString(MediaMetadata.METADATA_KEY_ALBUM, "ESPN")
                .putString(MediaMetadata.METADATA_KEY_TITLE, "Cubs The Favorites?: 10/14/15")
                .build());
    }

    public MediaSession.Token getMediaSessionToken() {
        return mMediaSession.getSessionToken();
    }

    @Override
    public void onPrepared(MediaPlayer mp) {
        // media is ready: start playback and reflect it on the session
        mMediaPlayer.start();
        mPlaybackState = new PlaybackState.Builder()
                .setState(PlaybackState.STATE_PLAYING, 0, 1.0f)
                .build();
        mMediaSession.setPlaybackState(mPlaybackState);
        updateNotification();
    }

    @Override
    public void onBufferingUpdate(MediaPlayer mp, int percent) {
        // buffering progress is intentionally not surfaced
    }

    @Override
    public void onCompletion(MediaPlayer mp) {
        // track finished: fall back to the idle state and free the player
        mPlaybackState = new PlaybackState.Builder()
                .setState(PlaybackState.STATE_NONE, 0, 1.0f)
                .build();
        mMediaSession.setPlaybackState(mPlaybackState);
        mMediaPlayer.reset();
    }

    @Override
    public IBinder onBind(Intent intent) {
        return mBinder;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        mPlaybackState = new PlaybackState.Builder()
                .setState(PlaybackState.STATE_NONE, 0, 1.0f)
                .build();
        // 1) set up media session and media session callback
        mMediaSession = new MediaSession(this, SESSION_TAG);
        mMediaSession.setCallback(mMediaSessionCallback);
        mMediaSession.setActive(true);
        mMediaSession.setFlags(MediaSession.FLAG_HANDLES_MEDIA_BUTTONS |
                MediaSession.FLAG_HANDLES_TRANSPORT_CONTROLS);
        mMediaSession.setPlaybackState(mPlaybackState);
        // 2) get instance to AudioManager
        mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        // 3) create our media player
        mMediaPlayer = new MediaPlayer();
        mMediaPlayer.setOnPreparedListener(this);
        mMediaPlayer.setOnCompletionListener(this);
        mMediaPlayer.setOnBufferingUpdateListener(this);
        // 4) create the media controller
        mMediaController = new MediaController(this, mMediaSession.getSessionToken());
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        mMediaPlayer.release();
        mMediaSession.release();
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        // translate notification action intents into transport controls
        if (intent != null && intent.getAction() != null) {
            switch (intent.getAction()) {
                case ACTION_PLAY:
                    mMediaController.getTransportControls().play();
                    break;
                case ACTION_FAST_FORWARD:
                    mMediaController.getTransportControls().fastForward();
                    break;
                case ACTION_REWIND:
                    mMediaController.getTransportControls().rewind();
                    break;
                case ACTION_PAUSE:
                    mMediaController.getTransportControls().pause();
                    break;
            }
        }
        return super.onStartCommand(intent, flags, startId);
    }

    /** Builds a notification action that re-delivers {@code action} to this service. */
    private Notification.Action createAction(int iconResId, String title, String action) {
        Intent intent = new Intent(this, AudioPlayerService.class);
        intent.setAction(action);
        // NOTE(review): flag 0 requires FLAG_IMMUTABLE/FLAG_MUTABLE on
        // API 31+ — confirm the target SDK before raising it
        PendingIntent pendingIntent = PendingIntent.getService(getApplicationContext(), 1, intent, 0);
        return new Notification.Action.Builder(iconResId, title, pendingIntent).build();
    }

    /** Posts (or refreshes) the transport notification for the current state. */
    private void updateNotification() {
        Notification.Action playPauseAction = mPlaybackState.getState() == PlaybackState.STATE_PLAYING ?
                createAction(R.drawable.ic_action_pause, "Pause", ACTION_PAUSE) :
                createAction(R.drawable.ic_action_play, "Play", ACTION_PLAY);
        Notification notification = new Notification.Builder(this)
                .setPriority(Notification.PRIORITY_DEFAULT)
                .setVisibility(Notification.VISIBILITY_PUBLIC)
                .setCategory(Notification.CATEGORY_TRANSPORT)
                .setContentTitle("Cubs The Favorites?: 10/14/15")
                .setContentText("ESPN: PTI")
                .setOngoing(mPlaybackState.getState() == PlaybackState.STATE_PLAYING)
                .setShowWhen(false)
                .setSmallIcon(R.mipmap.ic_launcher)
                .setAutoCancel(false)
                .addAction(createAction(R.drawable.ic_action_rewind, "Rewind", ACTION_REWIND))
                .addAction(playPauseAction)
                .addAction(createAction(R.drawable.ic_action_fast_forward, "Fast Forward", ACTION_FAST_FORWARD))
                .setStyle(new Notification.MediaStyle()
                        .setMediaSession(mMediaSession.getSessionToken())
                        .setShowActionsInCompactView(1, 2))
                .build();
        ((NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE)).notify(1, notification);
    }
}
| |
// ex: se sts=4 sw=4 expandtab:
/*
* Yeti language compiler java bytecode generator for java foreign interface.
*
* Copyright (c) 2007-2012 Madis Janson
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package yjs.lang.compiler;
import yeti.renamed.asm3.Label;
class JavaExpr extends Code {
    Code object;            // receiver expression the call is made on
    JavaType.Method method; // resolved java method being invoked
    Code[] args;            // argument expressions, one per declared parameter
    int line;               // source line number, used for debug line info

    /**
     * @param object receiver expression
     * @param method resolved java method
     * @param args   argument expressions
     * @param line   source line number for debug info
     */
    JavaExpr(Code object, JavaType.Method method, Code[] args, int line) {
        this.object = object;
        this.method = method;
        this.args = args;
        this.line = line;
    }
    // convert to java
    /*
     * Emits bytecode that converts the yeti value currently on the operand
     * stack into the java type expected by the target parameter.
     * Handles: array-to-array casts, yeti lists into java collections or
     * arrays (object and primitive), undefined strings into null, and yeti
     * numbers into boxed/primitive/BigInteger/BigDecimal forms.
     * NOTE: string descriptors are compared with == throughout; this relies
     * on the descriptors being interned constants.
     * The single-letter comments (i, a, v, n, A...) describe the operand
     * stack contents after each instruction.
     */
    private static void convert(Ctx ctx, YType given, YType argType) {
        given = given.deref();
        argType = argType.deref();
        String descr = argType.javaType == null
            ? "" : argType.javaType.description;
        if (argType.type == YetiType.JAVA_ARRAY &&
            given.type == YetiType.JAVA_ARRAY) {
            // array given for an array parameter: a checkcast suffices
            ctx.typeInsn(CHECKCAST, JavaType.descriptionOf(argType));
            return; // better than thinking, that array was given...
                    // still FIXME for a case of different arrays
        }
        if (given.type != YetiType.JAVA &&
            (argType.type == YetiType.JAVA_ARRAY ||
             argType.type == YetiType.JAVA &&
                argType.javaType.isCollection())) {
            // yeti list/iterator given for a java array or collection
            YType t = argType.param.length != 0
                ? argType.param[0].deref() : null;
            if (argType.type == YetiType.JAVA_ARRAY && t.javaType != null) {
                // fast path: list -> byte[] via Core.bytes
                if (t.javaType.description == "B") {
                    ctx.typeInsn(CHECKCAST, "yeti/lang/AList");
                    ctx.methodInsn(INVOKESTATIC, "yeti/lang/Core",
                                   "bytes", "(Lyeti/lang/AList;)[B");
                    return;
                }
                // fast path: list -> Object[] (element type is a class)
                if (t.javaType.description.charAt(0) == 'L') {
                    ctx.typeInsn(CHECKCAST, "yeti/lang/AList");
                    ctx.methodInsn(INVOKESTATIC, "yeti/lang/MList", "ofList",
                                   "(Lyeti/lang/AList;)Lyeti/lang/MList;");
                    ctx.insn(DUP);
                    ctx.methodInsn(INVOKEVIRTUAL, "yeti/lang/MList",
                                   "length", "()J");
                    ctx.insn(L2I);
                    new NewArrayExpr(argType, null, 0).gen(ctx);
                    ctx.methodInsn(INVOKEVIRTUAL, "yeti/lang/MList", "toArray",
                                   "([Ljava/lang/Object;)[Ljava/lang/Object;");
                    descr = JavaType.descriptionOf(argType);
                    ctx.typeInsn(CHECKCAST, descr);
                    ctx.forceType(descr);
                    return;
                }
            }
            // general path: walk the yeti iterator into a temporary
            // ArrayList (or HashSet for Set parameters)
            Label retry = new Label(), end = new Label();
            ctx.typeInsn(CHECKCAST, "yeti/lang/AIter"); // i
            String tmpClass = descr != "Ljava/lang/Set;"
                ? "java/util/ArrayList" : "java/util/HashSet";
            ctx.typeInsn(NEW, tmpClass); // ia
            ctx.insn(DUP);               // iaa
            ctx.visitInit(tmpClass, "()V"); // ia
            ctx.insn(SWAP); // ai
            ctx.insn(DUP);  // aii
            ctx.jumpInsn(IFNULL, end); // ai
            ctx.insn(DUP);  // aii
            ctx.methodInsn(INVOKEVIRTUAL, "yeti/lang/AIter",
                           "isEmpty", "()Z"); // aiz
            ctx.jumpInsn(IFNE, end); // ai
            ctx.visitLabel(retry);
            ctx.insn(DUP2); // aiai
            ctx.methodInsn(INVOKEVIRTUAL, "yeti/lang/AIter",
                           "first", "()Ljava/lang/Object;");
            // convert each element recursively unless it is already a
            // plain java object type
            if (t != null && (t.type != YetiType.JAVA ||
                              t.javaType.description.length() > 1))
                convert(ctx, given.param[0], argType.param[0]);
            // aiav
            ctx.methodInsn(INVOKEVIRTUAL, tmpClass,
                           "add", "(Ljava/lang/Object;)Z"); // aiz
            ctx.insn(POP); // ai
            ctx.methodInsn(INVOKEVIRTUAL, "yeti/lang/AIter",
                           "next", "()Lyeti/lang/AIter;"); // ai
            ctx.insn(DUP); // aii
            ctx.jumpInsn(IFNONNULL, retry); // ai
            ctx.visitLabel(end);
            ctx.insn(POP); // a
            if (argType.type != YetiType.JAVA_ARRAY)
                return; // a - List/Set
            // array parameter: compute the element descriptor, peeling
            // nested array dimensions into the prefix
            String s = "";
            YType argArrayType = argType;
            while ((argType = argType.param[0]).type ==
                        YetiType.JAVA_ARRAY) {
                s += "[";
                argArrayType = argType;
            }
            String arrayPrefix = s;
            if (s == "" && argType.javaType.description.length() != 1) {
                s = argType.javaType.className();
            } else {
                s += argType.javaType.description;
            }
            ctx.insn(DUP); // aa
            ctx.methodInsn(INVOKEVIRTUAL, tmpClass,
                           "size", "()I"); // an
            if (t.type != YetiType.JAVA ||
                (descr = t.javaType.description).length() != 1) {
                // object element type: Collection.toArray does the copy
                ctx.typeInsn(ANEWARRAY, s); // aA
                ctx.methodInsn(INVOKEVIRTUAL, tmpClass, "toArray",
                               "([Ljava/lang/Object;)[Ljava/lang/Object;");
                if (!s.equals("java/lang/Object")) {
                    ctx.typeInsn(CHECKCAST,
                        arrayPrefix + "[" + argType.javaType.description);
                }
                return; // A - object array
            }
            // emulate a fucking for loop to fill primitive array
            int index = ctx.localVarCount++;
            Label next = new Label(), done = new Label();
            ctx.insn(DUP); // ann
            ctx.varInsn(ISTORE, index); // an
            new NewArrayExpr(argArrayType, null, 0).gen(ctx);
            ctx.insn(SWAP); // Aa
            ctx.visitLabel(next);
            ctx.varInsn(ILOAD, index); // Aan
            ctx.jumpInsn(IFEQ, done); // Aa
            ctx.visitIntInsn(IINC, index); // Aa --index
            ctx.insn(DUP2); // AaAa
            ctx.varInsn(ILOAD, index); // AaAan
            ctx.methodInsn(INVOKEVIRTUAL, tmpClass,
                           "get", "(I)Ljava/lang/Object;"); // AaAv
            if (descr == "Z") {
                // unbox java.lang.Boolean
                ctx.typeInsn(CHECKCAST, "java/lang/Boolean");
                ctx.methodInsn(INVOKEVIRTUAL, "java/lang/Boolean",
                               "booleanValue", "()Z");
            } else {
                // unbox yeti number into the primitive
                ctx.typeInsn(CHECKCAST, "yeti/lang/Num");
                convertNum(ctx, descr);
            }
            ctx.varInsn(ILOAD, index); // AaAvn
            int insn = BASTORE;
            switch (argType.javaType.description.charAt(0)) {
                case 'D': insn = DASTORE; break;
                case 'F': insn = FASTORE; break;
                case 'I': insn = IASTORE; break;
                case 'J': insn = LASTORE; break;
                case 'S': insn = SASTORE;
            }
            if (insn == DASTORE || insn == LASTORE) {
                // AaAvvn actually - long and double is 2 entries
                ctx.insn(DUP_X2); // AaAnvvn
                ctx.insn(POP);    // AaAnvv
            } else {
                ctx.insn(SWAP); // AaAnv
            }
            ctx.insn(insn); // Aa
            ctx.jumpInsn(GOTO, next); // Aa
            ctx.visitLabel(done);
            ctx.insn(POP); // A
            return; // A - primitive array
        }
        if (given.type == YetiType.STR) {
            // yeti's "undefined" string sentinel becomes java null
            ctx.typeInsn(CHECKCAST, "java/lang/String");
            ctx.insn(DUP);
            ctx.fieldInsn(GETSTATIC, "yeti/lang/Core", "UNDEF_STR",
                          "Ljava/lang/String;");
            Label defined = new Label();
            ctx.jumpInsn(IF_ACMPNE, defined);
            ctx.insn(POP);
            ctx.insn(ACONST_NULL);
            ctx.visitLabel(defined);
            return;
        }
        if (given.type != YetiType.NUM ||
            descr == "Ljava/lang/Object;" ||
            descr == "Ljava/lang/Number;") {
            // non-numeric value (or number passed as Object/Number):
            // a plain checkcast is enough
            if (descr != "Ljava/lang/Object;") {
                ctx.typeInsn(CHECKCAST, argType.javaType.className());
            }
            return;
        }
        // Convert numbers...
        ctx.typeInsn(CHECKCAST, "yeti/lang/Num");
        if (descr == "Ljava/math/BigInteger;") {
            ctx.methodInsn(INVOKEVIRTUAL, "yeti/lang/Num",
                           "toBigInteger", "()Ljava/math/BigInteger;");
            return;
        }
        if (descr == "Ljava/math/BigDecimal;") {
            ctx.methodInsn(INVOKEVIRTUAL, "yeti/lang/Num",
                           "toBigDecimal", "()Ljava/math/BigDecimal;");
            return;
        }
        // boxed java.lang wrappers: unbox the Num into the primitive and
        // wrap it in a fresh instance of the target wrapper class
        String newInstr = null;
        if (descr.startsWith("Ljava/lang/")) {
            newInstr = argType.javaType.className();
            ctx.typeInsn(NEW, newInstr);
            ctx.insn(DUP_X1);
            ctx.insn(SWAP);
            descr = descr == "Ljava/lang/Long;" ? "J" : descr.substring(11, 12);
        }
        convertNum(ctx, descr);
        if (newInstr != null)
            ctx.visitInit(newInstr, "(" + descr + ")V");
    }
private static void convertNum(Ctx ctx, String descr) {
String method = null;
switch (descr.charAt(0)) {
case 'B': method = "byteValue"; break;
case 'D': method = "doubleValue"; break;
case 'F': method = "floatValue"; break;
case 'I': method = "intValue"; break;
case 'L': return;
case 'J': method = "longValue"; break;
case 'S': method = "shortValue"; break;
}
ctx.methodInsn(INVOKEVIRTUAL, "yeti/lang/Num",
method, "()" + descr);
}
    // MethodCall overrides it
    /* Emits the actual JVM invoke instruction for the resolved method. */
    void visitInvoke(Ctx ctx, int invokeInsn) {
        ctx.methodInsn(invokeInsn, method.classType.javaType.className(),
                       method.name, method.descr(null));
    }
    /*
     * Emits the java call: converts and pushes each declared argument, then
     * any extra captured bindings, and finally the invoke instruction.
     * The push order is significant - it must match the method descriptor.
     */
    void genCall(Ctx ctx, BindRef[] extraArgs, int invokeInsn) {
        for (int i = 0; i < args.length; ++i) {
            convertedArg(ctx, args[i], method.arguments[i], line);
        }
        if (extraArgs != null) {
            // captured bindings are appended after the declared arguments
            for (int i = 0; i < extraArgs.length; ++i) {
                BindRef arg = extraArgs[i];
                CaptureWrapper cw = arg.capture();
                if (cw == null) {
                    arg.gen(ctx);
                    ctx.captureCast(arg.captureType());
                } else {
                    cw.genPreGet(ctx);
                }
            }
        }
        ctx.visitLine(line);
        visitInvoke(ctx, invokeInsn);
        // sharpen the known stack type when the return type is an object
        JavaType jt = method.returnType.javaType;
        if (jt != null && jt.description.charAt(0) == 'L')
            ctx.forceType(jt.className());
    }
static void convertedArg(Ctx ctx, Code arg, YType argType, int line) {
argType = argType.deref();
if (argType.type == YetiType.JAVA) {
// integer arguments can be directly generated
String desc = desc = argType.javaType.description;
if (desc == "I" || desc == "J") {
arg.genInt(ctx, line, desc == "J");
return;
}
}
if (genRawArg(ctx, arg, argType, line))
convert(ctx, arg.type, argType);
else if (argType.type == YetiType.STR)
convertValue(ctx, arg.type.deref()); // for as cast
}
    /*
     * Generates the raw argument value on the stack and performs the
     * conversions that can be decided without the full convert() machinery
     * (booleans, unit, chars, string->char[], array->list wrapping).
     *
     * Returns true when the caller still has to invoke convert() to finish
     * the java-side conversion; false when the value is already in its
     * final form.
     */
    private static boolean genRawArg(Ctx ctx, Code arg,
                                     YType argType, int line) {
        YType given = arg.type.deref();
        String descr =
            argType.javaType == null ? null : argType.javaType.description;
        if (descr == "Z") {
            // boolean
            // branch on the condition and push the int 1/0 directly
            Label end = new Label(), lie = new Label();
            arg.genIf(ctx, lie, false);
            ctx.intConst(1);
            ctx.jumpInsn(GOTO, end);
            ctx.visitLabel(lie);
            ctx.intConst(0);
            ctx.visitLabel(end);
            return false;
        }
        arg.gen(ctx);
        if (given.type == YetiType.UNIT) {
            // unit maps to java null (a unit constant already is null)
            if (!(arg instanceof UnitConstant)) {
                ctx.insn(POP);
                ctx.insn(ACONST_NULL);
            }
            return false;
        }
        ctx.visitLine(line);
        if (descr == "C") {
            // char parameter: take the first char of the yeti string
            ctx.typeInsn(CHECKCAST, "java/lang/String");
            ctx.intConst(0);
            ctx.methodInsn(INVOKEVIRTUAL,
                           "java/lang/String", "charAt", "(I)C");
            return false;
        }
        if (argType.type == YetiType.JAVA_ARRAY &&
            given.type == YetiType.STR) {
            // string given for an array parameter: pass its char[]
            ctx.typeInsn(CHECKCAST, "java/lang/String");
            ctx.methodInsn(INVOKEVIRTUAL,
                           "java/lang/String", "toCharArray", "()[C");
            return false;
        }
        // string constants/concatenations are already java Strings
        if (arg instanceof StringConstant || arg instanceof ConcatStrings)
            return false;
        // conversion from array to list
        if (argType.type == YetiType.MAP && given.type == YetiType.JAVA_ARRAY) {
            JavaType javaItem = given.param[0].javaType;
            if (javaItem != null && javaItem.description.length() == 1) {
                // primitive array: wrap in a PArray view
                String arrayType = "[".concat(javaItem.description);
                ctx.typeInsn(CHECKCAST, arrayType);
                ctx.methodInsn(INVOKESTATIC, "yeti/lang/PArray",
                               "wrap", "(" + arrayType + ")Lyeti/lang/AList;");
                return false;
            }
            Label isNull = new Label(), end = new Label();
            ctx.typeInsn(CHECKCAST, "[Ljava/lang/Object;");
            ctx.insn(DUP);
            ctx.jumpInsn(IFNULL, isNull);
            // a list target (no array-index key) treats empty arrays as null
            boolean toList = argType.param[1].deref().type == YetiType.NONE;
            if (toList) {
                ctx.insn(DUP);
                ctx.insn(ARRAYLENGTH);
                ctx.jumpInsn(IFEQ, isNull);
            }
            if (toList && argType.param[0].deref().type == YetiType.STR) {
                // convert null's to undef_str's
                ctx.methodInsn(INVOKESTATIC, "yeti/lang/MList", "ofStrArray",
                               "([Ljava/lang/Object;)Lyeti/lang/MList;");
            } else {
                ctx.typeInsn(NEW, "yeti/lang/MList");
                ctx.insn(DUP_X1);
                ctx.insn(SWAP);
                ctx.visitInit("yeti/lang/MList", "([Ljava/lang/Object;)V");
            }
            ctx.jumpInsn(GOTO, end);
            ctx.visitLabel(isNull);
            // null (or empty, for lists) array: yield null for a list
            // target, otherwise an empty MList
            ctx.insn(POP);
            if (toList) {
                ctx.insn(ACONST_NULL);
            } else {
                ctx.typeInsn(NEW, "yeti/lang/MList");
                ctx.insn(DUP);
                ctx.visitInit("yeti/lang/MList", "()V");
            }
            ctx.visitLabel(end);
            return false;
        }
        // java targets still need the full convert() pass
        return argType.type == YetiType.JAVA ||
               argType.type == YetiType.JAVA_ARRAY;
    }
/**
 * Generates {@code arg} and, when a Yeti number is being passed to a
 * primitive Java parameter (single-character type descriptor), unboxes it
 * through convertNum.
 *
 * NOTE(review): argType.javaType is dereferenced without a null check here,
 * unlike in genRawArg — presumably callers guarantee a Java target type.
 */
static void genValue(Ctx ctx, Code arg, YType argType, int line) {
    genRawArg(ctx, arg, argType, line);
    if (arg.type.deref().type == YetiType.NUM &&
            argType.javaType.description.length() == 1) {
        ctx.typeInsn(CHECKCAST, "yeti/lang/Num");
        convertNum(ctx, argType.javaType.description);
    }
}
/**
 * Converts a raw Java value left on the operand stack into its Yeti
 * representation: void becomes null, null strings become UNDEF_STR,
 * booleans/ints/longs/floats are boxed into Boolean/IntNum/FloatNum, and
 * chars become single-character Strings.
 *
 * NOTE(review): descriptor strings are compared with ==, presumably relying
 * on interned/shared instances — confirm before changing to equals().
 */
static void convertValue(Ctx ctx, YType t) {
    if (t.type != YetiType.JAVA) {
        return; // array, no automatic conversions
    }
    String descr = t.javaType.description;
    if (descr == "V") {
        // void result: Yeti unit is represented as null
        ctx.insn(ACONST_NULL);
    } else if (descr == "Ljava/lang/String;") {
        Label nonnull = new Label();
        // checkcast to not lie later the type with ctx.fieldInsn
        ctx.typeInsn(CHECKCAST, "java/lang/String");
        ctx.insn(DUP);
        ctx.jumpInsn(IFNONNULL, nonnull);
        // substitute the undefined-string sentinel for Java null
        ctx.insn(POP);
        ctx.fieldInsn(GETSTATIC, "yeti/lang/Core", "UNDEF_STR",
                      "Ljava/lang/String;");
        ctx.visitLabel(nonnull);
    } else if (descr == "Z") {
        // box the primitive boolean into the shared Boolean constants
        Label skip = new Label(), end = new Label();
        ctx.jumpInsn(IFEQ, skip);
        ctx.fieldInsn(GETSTATIC, "java/lang/Boolean", "TRUE",
                      "Ljava/lang/Boolean;");
        ctx.jumpInsn(GOTO, end);
        ctx.visitLabel(skip);
        ctx.fieldInsn(GETSTATIC, "java/lang/Boolean", "FALSE",
                      "Ljava/lang/Boolean;");
        ctx.visitLabel(end);
    } else if (descr == "B" || descr == "S" ||
               descr == "I" || descr == "J") {
        if (descr == "B") {
            // bytes are widened unsigned (mask to 0..255)
            ctx.intConst(0xff);
            ctx.insn(IAND);
        }
        ctx.typeInsn(NEW, "yeti/lang/IntNum");
        if (descr == "J") {
            // long occupies two stack slots — shuffle the NEW ref below it
            ctx.insn(DUP_X2);
            ctx.insn(DUP_X2);
            ctx.insn(POP);
        } else {
            ctx.insn(DUP_X1);
            ctx.insn(SWAP);
        }
        ctx.visitInit("yeti/lang/IntNum",
                      descr == "J" ? "(J)V" : "(I)V");
        ctx.forceType("yeti/lang/Num");
    } else if (descr == "D" || descr == "F") {
        ctx.typeInsn(NEW, "yeti/lang/FloatNum");
        if (descr == "F") {
            // widen float to double before boxing
            ctx.insn(DUP_X1);
            ctx.insn(SWAP);
            ctx.insn(F2D);
        } else {
            // double occupies two stack slots — shuffle the NEW ref below it
            ctx.insn(DUP_X2);
            ctx.insn(DUP_X2);
            ctx.insn(POP);
        }
        ctx.visitInit("yeti/lang/FloatNum", "(D)V");
        ctx.forceType("yeti/lang/Num");
    } else if (descr == "C") {
        // chars become single-character Yeti strings
        ctx.methodInsn(INVOKESTATIC, "java/lang/String",
                       "valueOf", "(C)Ljava/lang/String;");
        ctx.forceType("java/lang/String");
    }
}
// Base implementation is a stub; concrete subclasses are expected to
// override this to emit their bytecode.
void gen(Ctx ctx) {
    throw new UnsupportedOperationException();
}
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.shell;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.build.execution.context.ExecutionContext;
import com.facebook.buck.core.exceptions.BuckUncheckedExecutionException;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.OutputLabel;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.core.rulekey.AddToRuleKey;
import com.facebook.buck.core.rulekey.CustomFieldBehavior;
import com.facebook.buck.core.rulekey.DefaultFieldInputs;
import com.facebook.buck.core.rulekey.ExcludeFromRuleKey;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter;
import com.facebook.buck.io.BuildCellRelativePath;
import com.facebook.buck.io.file.MorePaths;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.coercer.SourceSet;
import com.facebook.buck.rules.macros.WorkerMacroArg;
import com.facebook.buck.rules.modern.BuildCellRelativePathFactory;
import com.facebook.buck.rules.modern.Buildable;
import com.facebook.buck.rules.modern.CustomFieldInputs;
import com.facebook.buck.rules.modern.CustomFieldSerialization;
import com.facebook.buck.rules.modern.OutputPath;
import com.facebook.buck.rules.modern.OutputPathResolver;
import com.facebook.buck.rules.modern.PublicOutputPath;
import com.facebook.buck.rules.modern.RemoteExecutionEnabled;
import com.facebook.buck.rules.modern.ValueCreator;
import com.facebook.buck.rules.modern.ValueVisitor;
import com.facebook.buck.sandbox.NoSandboxExecutionStrategy;
import com.facebook.buck.sandbox.SandboxExecutionStrategy;
import com.facebook.buck.sandbox.SandboxProperties;
import com.facebook.buck.shell.programrunner.DirectProgramRunner;
import com.facebook.buck.shell.programrunner.ProgramRunner;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.step.StepExecutionResults;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.SymlinkTreeStep;
import com.facebook.buck.worker.WorkerJobParams;
import com.facebook.buck.worker.WorkerProcessIdentity;
import com.facebook.buck.worker.WorkerProcessParams;
import com.facebook.buck.worker.WorkerProcessPoolFactory;
import com.facebook.buck.zip.ZipScrubberStep;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
/**
* Buildable for {@link Genrule} suitable for building Genrules directly and also for subclasses
* extending the functionality of a bare Genrule.
*/
public class GenruleBuildable implements Buildable {
/**
* Name of the "srcs" subdirectory in the gen directory tree. GenruleBuildable symlinks all source
* files into this directory and sets this directory to be the working directory of the command.
*/
protected static final String SRC_DIRECTORY_PATTERN = "%s__srcs";
/** The build target for this genrule. */
@AddToRuleKey protected final BuildTarget buildTarget;
/**
* SourceSet for this Genrule, exposed as SRCS in the genrule command.
*
* <p>The order in which elements are specified in the {@code srcs} attribute of a genrule
* matters.
*/
@AddToRuleKey protected final SourceSet srcs;
/**
* The shell command to run to generate the output file. Used as the fallback if neither bash nor
* cmdExe are provided.
*/
@AddToRuleKey protected final Optional<Arg> cmd;
/** The bash shell command to generate the output file. Only used on platforms that have bash. */
@AddToRuleKey protected final Optional<Arg> bash;
/** The cmd shell command to generate the output file. Only used on Windows. */
@AddToRuleKey protected final Optional<Arg> cmdExe;
/**
* The name of the output file that this genrule intends to generate. Should not be present if
* {@link #outputPaths} is present.
*/
@AddToRuleKey protected final Optional<String> out;
/** Whether this target should be executed in a sandbox, if one is supported on this platform. */
@AddToRuleKey private final boolean enableSandboxingInGenrule;
/** The delimiter between paths in environment variables. */
@AddToRuleKey private final String environmentExpansionSeparator;
/** Whether or not the tool being invoked in this genrule is a worker_tool . */
@AddToRuleKey private final boolean isWorkerGenrule;
/**
* The output path of the file generated by this genrule, if present. Note that this output path
* is Public because it uses a folder name that is exactly equal to the target name, unlike other
* MBRs which use the target name suffixed by the flavor (or __ if no flavor is provided). This is
 * for backwards compatibility with users of Buck that have hardcoded their paths. One and only
* one of {@link #outputPath} and {@link #outputPaths} must be present.
*/
@AddToRuleKey protected final Optional<PublicOutputPath> outputPath;
/**
* The output paths of the files generated by this genrule organized by their output labels.
*
* <p>The paths are relative to the directory buck-out/gen/<target_name>__. For example, if the
* target is named "foo", the output paths in this map would be relative to buck-out/gen/foo__.
* Note that {@link #outputPath} places the output in buck-out/gen/foo.
*
* <p>One and only one of {@link #outputPath} and {@link #outputPaths} must be present.
*/
@AddToRuleKey
protected final Optional<ImmutableMap<OutputLabel, ImmutableSet<OutputPath>>> outputPaths;
/**
* Whether or not this genrule can be cached. This is not used within this class, but is required
* to be a part of the rule key.
*/
@AddToRuleKey protected final boolean isCacheable;
/** Whether or not this genrule can be executed remotely. Fails serialization if false. */
@AddToRuleKey
@CustomFieldBehavior(RemoteExecutionEnabled.class)
private final boolean executeRemotely;
/** Type for this genrule, if one was provided. */
@AddToRuleKey protected final Optional<String> type;
/**
* The set of optional Android tools to make available inside the genrule's environment. This rule
* should not be remote-executed if this field is present!
*/
@ExcludeFromRuleKey(
reason = "GenruleAndroidTools contains paths to things outside of the repo",
serialization = GenruleAndroidToolsBehavior.class,
inputs = DefaultFieldInputs.class)
private final Optional<GenruleAndroidTools> androidTools;
/**
* This genrule's sandbox execution strategy. If sandboxing is enabled for genrules and the
* platform supports it, this strategy can be used to generate a sandbox suitable for running the
* genrule command.
*
* <p>Note that this field is serialized as {@link NoSandboxExecutionStrategy}, since it does not
* make sense to use a sandbox when executing remotely.
*/
@ExcludeFromRuleKey(
reason = "Non-default sandbox execution not useful when executing remotely",
serialization = SandboxExecutionStrategyBehavior.class,
inputs = SandboxExecutionStrategyBehavior.class)
private final SandboxExecutionStrategy sandboxExecutionStrategy;
/**
* Sandbox properties for this genrule. The properties contain the set of permissions available to
 * the genrule process. This field is optional since it retains a significant amount of memory when
* present, even if left empty.
*
* <p>This field is also serialized as an empty property set since sandboxing does not make sense
* when executing remotely.
*/
@ExcludeFromRuleKey(
reason = "Non-default sandbox execution not useful when executing remotely",
serialization = SandboxPropertiesBehavior.class,
inputs = SandboxPropertiesBehavior.class)
private final Optional<SandboxProperties> sandboxProperties;
/**
 * Creates a buildable genrule.
 *
 * <p>Exactly one of {@code out} and {@code outs} must be present: {@code out} yields the single
 * legacy {@link #outputPath}, while {@code outs} populates the labeled {@link #outputPaths} map.
 *
 * @throws HumanReadableException if any declared output is absolute or empty
 */
public GenruleBuildable(
    BuildTarget buildTarget,
    ProjectFilesystem filesystem,
    SandboxExecutionStrategy sandboxExecutionStrategy,
    SourceSet srcs,
    Optional<Arg> cmd,
    Optional<Arg> bash,
    Optional<Arg> cmdExe,
    Optional<String> type,
    Optional<String> out,
    Optional<ImmutableMap<String, ImmutableList<String>>> outs,
    boolean enableSandboxingInGenrule,
    boolean isCacheable,
    String environmentExpansionSeparator,
    Optional<SandboxProperties> sandboxProperties,
    Optional<GenruleAndroidTools> androidTools,
    boolean executeRemotely) {
  this.buildTarget = buildTarget;
  this.sandboxExecutionStrategy = sandboxExecutionStrategy;
  this.srcs = srcs;
  this.cmd = cmd;
  this.bash = bash;
  this.cmdExe = cmdExe;
  this.type = type;
  this.out = out;
  this.enableSandboxingInGenrule = enableSandboxingInGenrule;
  this.isCacheable = isCacheable;
  this.environmentExpansionSeparator = environmentExpansionSeparator;
  this.sandboxProperties = sandboxProperties;
  // Computed eagerly; also validates that worker macros are used consistently.
  this.isWorkerGenrule = isWorkerGenrule();
  this.androidTools = androidTools;
  this.executeRemotely = executeRemotely;

  // 'out' and 'outs' are mutually exclusive by construction.
  Preconditions.checkArgument(
      out.isPresent() ^ outs.isPresent(), "Genrule unexpectedly has both 'out' and 'outs'.");
  if (outs.isPresent()) {
    ImmutableMap<String, ImmutableList<String>> outputs = outs.get();
    ImmutableMap.Builder<OutputLabel, ImmutableSet<OutputPath>> mapBuilder =
        ImmutableMap.builderWithExpectedSize(outputs.size());
    for (Map.Entry<String, ImmutableList<String>> outputLabelToOutputs : outputs.entrySet()) {
      mapBuilder.put(
          OutputLabel.of(outputLabelToOutputs.getKey()),
          outputLabelToOutputs.getValue().stream()
              .map(
                  p -> {
                    Path path = Paths.get(p);
                    // Each declared output must be a relative, non-empty path.
                    checkOutputPath(path, p);
                    return new OutputPath(path);
                  })
              .collect(ImmutableSet.toImmutableSet()));
    }
    this.outputPaths = Optional.of(mapBuilder.build());
    this.outputPath = Optional.empty();
  } else {
    // Sanity-check and record the single legacy output path.
    this.outputPath = Optional.of(new PublicOutputPath(getLegacyPath(filesystem, out.get())));
    this.outputPaths = Optional.empty();
  }
}
/**
 * Resolves {@code output} against the legacy (un-suffixed) gen path for this target and
 * validates it as a usable output file name.
 */
private Path getLegacyPath(ProjectFilesystem filesystem, String output) {
  Path genDirForTarget = BuildTargetPaths.getGenPath(filesystem, buildTarget, "%s");
  Path legacyBasePath = genDirForTarget.resolve(output).normalize();
  checkOutputPath(legacyBasePath, output);
  return legacyBasePath;
}
/** Rejects absolute or empty output names declared in 'out'/'outs'. */
private void checkOutputPath(Path path, String output) {
  boolean valid = !path.isAbsolute() && !output.isEmpty();
  if (valid) {
    return;
  }
  throw new HumanReadableException(
      "The 'out' or 'outs' parameter of genrule %s is '%s', which is not a valid file name.",
      buildTarget, output);
}
/**
 * Returns the set of {@link OutputPath} instances associated with the given {@link OutputLabel}.
 *
 * <p>If multiple outputs are available, returns either the default or named output group. The
 * default output group is the set of all named outputs.
 *
 * <p>If multiple outputs are not available, returns a set containing the single output.
 *
 * @throws HumanReadableException if a named label is requested but not declared, or if a named
 *     label is used with the single-output 'out' form
 */
public ImmutableSet<OutputPath> getOutputs(OutputLabel outputLabel) {
  return outputPaths
      .map(
          paths -> {
            if (outputLabel.isDefault()) {
              return getAllOutputPaths(paths);
            }
            // Fixed: local renamed from 'pathsforLabel' to follow lowerCamelCase.
            ImmutableSet<OutputPath> pathsForLabel = paths.get(outputLabel);
            if (pathsForLabel == null) {
              throw new HumanReadableException(
                  "Cannot find output label [%s] for target %s",
                  outputLabel, buildTarget.getFullyQualifiedName());
            }
            return pathsForLabel;
          })
      .orElseGet(
          () -> {
            // Single-output ('out') genrules only support the default label.
            Preconditions.checkArgument(
                outputLabel.isDefault(),
                "Unexpected output label [%s] for target %s. Use 'outs' instead of 'out' to use output labels",
                outputLabel,
                buildTarget.getFullyQualifiedName());
            return ImmutableSet.of(outputPath.get());
          });
}
/**
 * Returns a map of output labels to their associated {@link OutputPath} instances. The default
 * label always maps to the union of all outputs.
 */
public ImmutableMap<OutputLabel, ImmutableSet<OutputPath>> getOutputMap() {
  if (outputPaths.isPresent()) {
    ImmutableMap<OutputLabel, ImmutableSet<OutputPath>> labeledPaths = outputPaths.get();
    ImmutableMap.Builder<OutputLabel, ImmutableSet<OutputPath>> combined =
        ImmutableMap.builderWithExpectedSize(labeledPaths.size() + 1);
    combined.putAll(labeledPaths);
    combined.put(OutputLabel.defaultLabel(), getAllOutputPaths(labeledPaths));
    return combined.build();
  }
  return ImmutableMap.of(OutputLabel.defaultLabel(), ImmutableSet.of(outputPath.get()));
}
/** Flattens every labeled output group into a single set, preserving encounter order. */
private ImmutableSet<OutputPath> getAllOutputPaths(
    ImmutableMap<OutputLabel, ImmutableSet<OutputPath>> paths) {
  ImmutableSet.Builder<OutputPath> union = ImmutableSet.builder();
  for (ImmutableSet<OutputPath> group : paths.values()) {
    union.addAll(group);
  }
  return union.build();
}
@Override
public ImmutableList<Step> getBuildSteps(
    BuildContext buildContext,
    ProjectFilesystem filesystem,
    OutputPathResolver outputPathResolver,
    BuildCellRelativePathFactory buildCellPathFactory) {
  ImmutableList.Builder<Step> commands = ImmutableList.builder();

  // Genrules traditionally used an un-postfixed folder name to deposit their outputs. Modern
  // build rules suffix "__" to an unflavored target's output directory. To avoid breaking things,
  // we deposit our outputs in un-postfix (legacy) folder name.
  //
  // Note that it is not sufficient to create the parent directory of `outputPath`; there are
  // rules that consist of nested directory and file paths that will not be correct. The contract
  // of genrule is that only the legacyBasePath is created. All other paths must be created by the
  // shell script.
  Path legacyBasePath = BuildTargetPaths.getGenPath(filesystem, buildTarget, "%s");
  commands.addAll(
      MakeCleanDirectoryStep.of(
          BuildCellRelativePath.fromCellRelativePath(
              buildContext.getBuildCellRootPath(), filesystem, legacyBasePath)));

  // If we use the OutputPathResolver's temp path, we don't need to create the directory; it will
  // be automatically created for us.
  Path tmpPath = outputPathResolver.getTempPath();

  // Create a directory to hold all the source files. Ideally this would be under the temp path,
  // but there exist tools (namely the Protobuf compiler) that have a hard dependency on the
  // compiler's working directory sharing a directory tree with the files being compiled.
  Path srcPath = BuildTargetPaths.getGenPath(filesystem, buildTarget, SRC_DIRECTORY_PATTERN);
  commands.addAll(
      MakeCleanDirectoryStep.of(
          BuildCellRelativePath.fromCellRelativePath(
              buildContext.getBuildCellRootPath(), filesystem, srcPath)));
  addSymlinkCommands(buildContext, filesystem, srcPath, commands);

  // Create a shell command that corresponds to this.cmd.
  if (this.isWorkerGenrule) {
    commands.add(
        createWorkerShellStep(buildContext, outputPathResolver, filesystem, srcPath, tmpPath));
  } else {
    commands.add(
        createGenruleStep(
            buildContext,
            outputPathResolver,
            filesystem,
            srcPath,
            tmpPath,
            createProgramRunner()));
  }

  // Scrub any zip outputs so the archives are byte-for-byte reproducible.
  outputPaths.ifPresent(
      outputLabelsToPaths ->
          outputLabelsToPaths
              .values()
              .forEach(
                  paths ->
                      paths.forEach(
                          path ->
                              maybeAddZipperScrubberStep(
                                  filesystem, outputPathResolver, commands, path))));
  outputPath.ifPresent(
      path -> maybeAddZipperScrubberStep(filesystem, outputPathResolver, commands, path));

  return commands.build();
}
/** Appends a {@link ZipScrubberStep} for the given output iff it is a .zip file. */
private void maybeAddZipperScrubberStep(
    ProjectFilesystem filesystem,
    OutputPathResolver outputPathResolver,
    ImmutableList.Builder<Step> commands,
    OutputPath path) {
  // ZipScrubberStep requires that its argument path be absolute.
  Path absoluteOutput = filesystem.getPathForRelativePath(outputPathResolver.resolvePath(path));
  boolean isZip = MorePaths.getFileExtension(absoluteOutput).equals("zip");
  if (isZip) {
    commands.add(ZipScrubberStep.of(absoluteOutput));
  }
}
/**
 * Appends a {@link SymlinkTreeStep} that links every declared source into
 * {@code pathToSrcDirectory}; no-op when the genrule has no srcs.
 *
 * <p>Fixed: the lambda parameters previously shadowed the {@code srcs} field, which made the
 * bodies ambiguous to read; they are now named after what each branch actually receives.
 */
@VisibleForTesting
final void addSymlinkCommands(
    BuildContext context,
    ProjectFilesystem filesystem,
    Path pathToSrcDirectory,
    ImmutableList.Builder<Step> commands) {
  if (srcs.isEmpty()) {
    return;
  }
  Map<Path, Path> linksBuilder = new HashMap<>();
  // Symlink all sources into the temp directory so that they can be used in the genrule.
  srcs.getNamedSources()
      .ifPresent(
          namedSources ->
              addLinksForNamedSources(
                  context.getSourcePathResolver(), filesystem, namedSources, linksBuilder));
  srcs.getUnnamedSources()
      .ifPresent(
          unnamedSources ->
              addLinksForAnonymousSources(
                  context.getSourcePathResolver(), filesystem, unnamedSources, linksBuilder));
  commands.add(
      new SymlinkTreeStep(
          "genrule_srcs",
          filesystem,
          pathToSrcDirectory,
          ImmutableSortedMap.copyOf(linksBuilder)));
}
/** Links each named source under its declared name, targeting the repo-relative source path. */
private void addLinksForNamedSources(
    SourcePathResolverAdapter pathResolver,
    ProjectFilesystem filesystem,
    ImmutableMap<String, SourcePath> srcs,
    Map<Path, Path> links) {
  for (Map.Entry<String, SourcePath> entry : srcs.entrySet()) {
    Path absolutePath = pathResolver.getAbsolutePath(entry.getValue());
    links.put(filesystem.getPath(entry.getKey()), filesystem.relativize(absolutePath));
  }
}
/**
 * Links each unnamed source into the source tree under a path derived from its location
 * relative to the build target's base path.
 */
private void addLinksForAnonymousSources(
    SourcePathResolverAdapter pathResolver,
    ProjectFilesystem filesystem,
    ImmutableSet<SourcePath> srcs,
    Map<Path, Path> links) {
  // To preserve legacy behavior, we allow duplicate targets and just ignore all but the
  // last.
  Set<Path> seenTargets = new HashSet<>();
  Path basePath =
      buildTarget.getCellRelativeBasePath().getPath().toPath(filesystem.getFileSystem());
  // Iterating in reverse plus the seenTargets check means the LAST declaration of a
  // duplicate target wins.
  ImmutableList.copyOf(srcs)
      .reverse()
      .forEach(
          src -> {
            Path relativePath = pathResolver.getRelativePath(src);
            Path absolutePath = pathResolver.getAbsolutePath(src);
            Path canonicalPath = absolutePath.normalize();

            // By the time we get this far, all source paths (the keys in the map) have
            // been converted to paths relative to the project root. We want the path
            // relative to the build target, so strip the base path.
            Path localPath;
            if (absolutePath.equals(canonicalPath)) {
              if (relativePath.startsWith(basePath) || basePath.toString().isEmpty()) {
                localPath = MorePaths.relativize(basePath, relativePath);
              } else {
                // Source lives outside this target's base path: link by file name only.
                localPath = canonicalPath.getFileName();
              }
            } else {
              localPath = relativePath;
            }

            Path target = filesystem.relativize(absolutePath);
            if (!seenTargets.contains(target)) {
              seenTargets.add(target);
              links.put(localPath, target);
            }
          });
}
/**
 * Returns whether this genrule invokes a worker tool. Worker macros must be used in all of the
 * provided cmd/bash/cmd_exe properties or in none of them; mixing is an error.
 */
@VisibleForTesting
final boolean isWorkerGenrule() {
  Arg cmdArg = cmd.orElse(null);
  Arg bashArg = bash.orElse(null);
  Arg cmdExeArg = cmdExe.orElse(null);
  boolean anyWorker =
      cmdArg instanceof WorkerMacroArg
          || bashArg instanceof WorkerMacroArg
          || cmdExeArg instanceof WorkerMacroArg;
  if (!anyWorker) {
    return false;
  }
  boolean anyNonWorker =
      (cmdArg != null && !(cmdArg instanceof WorkerMacroArg))
          || (bashArg != null && !(bashArg instanceof WorkerMacroArg))
          || (cmdExeArg != null && !(cmdExeArg instanceof WorkerMacroArg));
  if (anyNonWorker) {
    throw new HumanReadableException(
        "You cannot use a worker macro in one of the cmd, bash, "
            + "or cmd_exe properties and not in the others for genrule %s.",
        buildTarget.getFullyQualifiedName());
  }
  return true;
}
/** Returns the raw {@code cmd} argument; exposed for tests only. */
@VisibleForTesting
public Optional<Arg> getCmd() {
  return cmd;
}
/**
 * Chooses a {@link ProgramRunner}: a sandboxed runner when sandboxing is both enabled for this
 * rule and supported by the platform, otherwise a direct runner.
 */
private ProgramRunner createProgramRunner() {
  boolean useSandbox = sandboxExecutionStrategy.isSandboxEnabled() && enableSandboxingInGenrule;
  if (!useSandbox) {
    return new DirectProgramRunner();
  }
  Preconditions.checkState(
      sandboxProperties.isPresent(),
      "SandboxProperties must have been calculated earlier if sandboxing was requested");
  return sandboxExecutionStrategy.createSandboxProgramRunner(sandboxProperties.get());
}
/**
 * Builds the {@link AbstractGenruleStep} that runs the genrule command. The anonymous subclass
 * wires in this buildable's environment variables and, after a successful run, verifies that
 * every declared output file was actually written.
 */
@VisibleForTesting
final AbstractGenruleStep createGenruleStep(
    BuildContext context,
    OutputPathResolver outputPathResolver,
    ProjectFilesystem filesystem,
    Path srcPath,
    Path tmpPath,
    ProgramRunner programRunner) {
  SourcePathResolverAdapter sourcePathResolverAdapter = context.getSourcePathResolver();

  // The user's command (this.cmd) should be run from the directory that contains only the
  // symlinked files. This ensures that the user can reference only the files that were declared
  // as srcs. Without this, a genrule is not guaranteed to be hermetic.
  return new AbstractGenruleStep(
      filesystem,
      new AbstractGenruleStep.CommandString(
          Arg.flattenToSpaceSeparatedString(cmd, sourcePathResolverAdapter),
          Arg.flattenToSpaceSeparatedString(bash, sourcePathResolverAdapter),
          Arg.flattenToSpaceSeparatedString(cmdExe, sourcePathResolverAdapter)),
      BuildCellRelativePath.fromCellRelativePath(
              context.getBuildCellRootPath(), filesystem, srcPath)
          .getPathRelativeToBuildCellRoot(),
      programRunner) {
    @Override
    protected void addEnvironmentVariables(
        ExecutionContext executionContext,
        ImmutableMap.Builder<String, String> environmentVariablesBuilder) {
      GenruleBuildable.this.addEnvironmentVariables(
          sourcePathResolverAdapter,
          outputPathResolver,
          filesystem,
          srcPath,
          tmpPath,
          environmentVariablesBuilder);
    }

    @Override
    public StepExecutionResult execute(ExecutionContext context)
        throws IOException, InterruptedException {
      StepExecutionResult result = super.execute(context);
      if (result.getExitCode() != StepExecutionResults.SUCCESS_EXIT_CODE) {
        // Command failed; propagate the failure without checking outputs.
        return result;
      }
      // Command succeeded: verify every declared output actually exists.
      if (outputPaths.isPresent()) {
        for (ImmutableSet<OutputPath> paths : outputPaths.get().values()) {
          paths.forEach(p -> checkPath(filesystem, outputPathResolver.resolvePath(p)));
        }
      } else {
        checkPath(filesystem, outputPathResolver.resolvePath(outputPath.get()));
      }
      return result;
    }

    // Fails the build with a descriptive error when a declared output is missing.
    private void checkPath(ProjectFilesystem filesystem, Path resolvedPath) {
      if (!filesystem.exists(resolvedPath)) {
        throw new BuckUncheckedExecutionException(
            new FileNotFoundException(
                String.format(
                    "Expected file %s to be written from genrule %s. File was not present",
                    resolvedPath, buildTarget.getFullyQualifiedName())));
      }
    }
  };
}
/** Returns whether remote execution is permitted for this genrule; exposed for tests. */
@VisibleForTesting
final boolean shouldExecuteRemotely() {
  return executeRemotely;
}
/**
 * Adds the standard set of environment variables to the genrule, which are then exposed to the
 * genrule command.
 *
 * <p>This method populates these well-known environment variables:
 *
 * <ul>
 *   <li><code>SRCS</code>, a delimited list of source file inputs to the genrule
 *   <li><code>OUT</code>, the genrule's output file
 *   <li><code>GEN_DIR</code>, Buck's gendir
 *   <li><code>SRCDIR</code>, the symlink-populated source directory readable to the command
 *   <li><code>TMP</code>, the temp directory usable by the command
 *   <li><code>ANDROID_HOME</code>, the path to the Android SDK (if present)
 *   <li><code>DX</code>, the path to the Android DX executable (if present)
 *   <li><code>ZIPALIGN</code>, the path to the Android Zipalign executable (if present)
 *   <li><code>AAPT</code>, the path to the Android AAPT executable (if present)
 *   <li><code>AAPT2</code>, the path to the Android AAPT2 executable (if present)
 *   <li><code>NDK_HOME</code>, the path to the Android NDK (if present)
 * </ul>
 *
 * This method also sets <code>NO_BUCKD</code> to <code>1</code>.
 *
 * @param pathResolver Path resolver for resolving paths for <code>SRCS</code>
 * @param outputPathResolver Path resolver for resolving <code>OUT</code>
 * @param filesystem Filesystem for resolving relative paths for <code>SRCDIR</code> and <code>TMP
 *     </code>
 * @param srcPath Path to the generated symlink source directory
 * @param tmpPath Path to the genrule temporary directory
 * @param environmentVariablesBuilder Environment map builder
 */
protected void addEnvironmentVariables(
    SourcePathResolverAdapter pathResolver,
    OutputPathResolver outputPathResolver,
    ProjectFilesystem filesystem,
    Path srcPath,
    Path tmpPath,
    ImmutableMap.Builder<String, String> environmentVariablesBuilder) {
  // Single-output ('out') genrules expose OUT as the resolved output file itself.
  outputPath.ifPresent(
      path ->
          environmentVariablesBuilder.put(
              "OUT", filesystem.resolve(outputPathResolver.resolvePath(path)).toString()));
  // Multi-output ('outs') genrules expose OUT as the root output directory instead.
  outputPaths.ifPresent(
      paths ->
          environmentVariablesBuilder.put(
              "OUT", filesystem.resolve(outputPathResolver.getRootPath()).toString()));
  environmentVariablesBuilder.put(
      "SRCS",
      srcs.getPaths().stream()
          .map(pathResolver::getAbsolutePath)
          .map(Object::toString)
          .collect(Collectors.joining(this.environmentExpansionSeparator)));
  environmentVariablesBuilder.put(
      "GEN_DIR", filesystem.resolve(filesystem.getBuckPaths().getGenDir()).toString());
  environmentVariablesBuilder.put("SRCDIR", filesystem.resolve(srcPath).toString());
  environmentVariablesBuilder.put("TMP", filesystem.resolve(tmpPath).toString());

  // TODO(mbolin): This entire hack needs to be removed. The [tools] section of .buckconfig
  // should be generalized to specify local paths to tools that can be used in genrules.
  androidTools.ifPresent(
      tools -> {
        environmentVariablesBuilder.put("ANDROID_HOME", tools.getAndroidSdkLocation().toString());
        environmentVariablesBuilder.put("DX", tools.getAndroidPathToDx().toString());
        environmentVariablesBuilder.put("ZIPALIGN", tools.getAndroidPathToZipalign().toString());
        environmentVariablesBuilder.put(
            "AAPT", String.join(" ", tools.getAaptTool().getCommandPrefix(pathResolver)));
        environmentVariablesBuilder.put(
            "AAPT2", String.join(" ", tools.getAapt2Tool().getCommandPrefix(pathResolver)));
        tools
            .getAndroidNdkLocation()
            .ifPresent(ndk -> environmentVariablesBuilder.put("NDK_HOME", ndk.toString()));
      });

  // TODO(t5302074): This shouldn't be necessary. Speculatively disabling.
  environmentVariablesBuilder.put("NO_BUCKD", "1");
}
/**
 * Builds the {@link WorkerShellStep} used when this genrule's command is a worker macro. The
 * anonymous subclass injects the same environment variables as a regular genrule step.
 */
private WorkerShellStep createWorkerShellStep(
    BuildContext context,
    OutputPathResolver outputPathResolver,
    ProjectFilesystem filesystem,
    Path srcPath,
    Path tmpPath) {
  return new WorkerShellStep(
      buildTarget,
      convertToWorkerJobParams(context.getSourcePathResolver(), cmd),
      convertToWorkerJobParams(context.getSourcePathResolver(), bash),
      convertToWorkerJobParams(context.getSourcePathResolver(), cmdExe),
      new WorkerProcessPoolFactory(filesystem)) {
    @Override
    protected ImmutableMap<String, String> getEnvironmentVariables() {
      ImmutableMap.Builder<String, String> envVarBuilder = ImmutableMap.builder();
      GenruleBuildable.this.addEnvironmentVariables(
          context.getSourcePathResolver(),
          outputPathResolver,
          filesystem,
          srcPath,
          tmpPath,
          envVarBuilder);
      return envVarBuilder.build();
    }
  };
}
/**
 * Converts a worker-macro {@link Arg} into {@link WorkerJobParams}; empty in, empty out. The
 * cast is safe because callers only pass args validated by {@code isWorkerGenrule()}.
 */
private static Optional<WorkerJobParams> convertToWorkerJobParams(
    SourcePathResolverAdapter resolver, Optional<Arg> arg) {
  if (!arg.isPresent()) {
    return Optional.empty();
  }
  WorkerMacroArg workerMacroArg = (WorkerMacroArg) arg.get();
  Optional<WorkerProcessIdentity> identity =
      workerMacroArg.getPersistentWorkerKey().isPresent()
          ? Optional.of(
              WorkerProcessIdentity.of(
                  workerMacroArg.getPersistentWorkerKey().get(),
                  workerMacroArg.getWorkerHash()))
          : Optional.empty();
  return Optional.of(
      WorkerJobParams.of(
          workerMacroArg.getJobArgs(resolver),
          WorkerProcessParams.of(
              workerMacroArg.getTempDir(),
              workerMacroArg.getStartupCommand(),
              workerMacroArg.getEnvironment(),
              workerMacroArg.getMaxWorkers(),
              identity)));
}
/**
 * Serialization strategy for {@link SandboxExecutionStrategy} that deserializes to only {@link
 * NoSandboxExecutionStrategy} and takes up no bytes on the wire. Sandboxing does not make sense
 * when executing remotely, so the concrete strategy is intentionally discarded.
 */
private static class SandboxExecutionStrategyBehavior
    implements CustomFieldSerialization<SandboxExecutionStrategy>,
        CustomFieldInputs<SandboxExecutionStrategy> {
  @Override
  public void getInputs(SandboxExecutionStrategy value, Consumer<SourcePath> consumer) {
    // No inputs, don't populate anything.
  }

  @Override
  public <E extends Exception> void serialize(
      SandboxExecutionStrategy value, ValueVisitor<E> serializer) throws E {
    // Don't place anything on the wire, there's no information to convey.
  }

  @Override
  public <E extends Exception> SandboxExecutionStrategy deserialize(ValueCreator<E> deserializer)
      throws E {
    // Always reconstitute as the no-op strategy.
    return new NoSandboxExecutionStrategy();
  }
}
/**
 * Serialization strategy for {@link SandboxProperties} that deserializes to the default value of
 * the builder and takes up no bytes on the wire. Sandboxing does not apply to remote execution,
 * so the properties are intentionally dropped.
 */
private static class SandboxPropertiesBehavior
    implements CustomFieldSerialization<Optional<SandboxProperties>>,
        CustomFieldInputs<Optional<SandboxProperties>> {
  @Override
  public void getInputs(Optional<SandboxProperties> value, Consumer<SourcePath> consumer) {
    // No inputs, don't populate anything.
  }

  @Override
  public <E extends Exception> void serialize(
      Optional<SandboxProperties> value, ValueVisitor<E> serializer) throws E {
    // Don't place anything on the wire, there's no information to convey.
  }

  @Override
  public <E extends Exception> Optional<SandboxProperties> deserialize(
      ValueCreator<E> deserializer) throws E {
    // Always reconstitute as absent.
    return Optional.empty();
  }
}
/**
 * Serialization strategy for the optional {@link GenruleAndroidTools} field. The tool paths
 * point outside of the repo, so a rule carrying them cannot be executed remotely; attempting to
 * serialize a present value aborts serialization (and thereby remote execution).
 *
 * <p>Fixed: the type argument was {@code Optional<GenruleAndroidToolsBehavior>} (the behavior
 * class itself), but this strategy serializes the {@code androidTools} field, whose type is
 * {@code Optional<GenruleAndroidTools>}.
 */
private static class GenruleAndroidToolsBehavior
    implements CustomFieldSerialization<Optional<GenruleAndroidTools>> {
  @Override
  public <E extends Exception> void serialize(
      Optional<GenruleAndroidTools> value, ValueVisitor<E> serializer) throws E {
    if (value.isPresent()) {
      throw new DisableRemoteExecutionException();
    }
  }

  @Override
  public <E extends Exception> Optional<GenruleAndroidTools> deserialize(
      ValueCreator<E> deserializer) throws E {
    return Optional.empty();
  }

  private static class DisableRemoteExecutionException extends HumanReadableException {
    public DisableRemoteExecutionException() {
      super("Remote execution is not available to genrules that need Android tools.");
    }
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.client.deployment.application;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.util.FileUtils;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.StringUtils;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* {@code FromClasspathEntryClassInformationProvider} assumes the passed job class being available
* on some classpath.
*/
public class FromClasspathEntryClassInformationProvider implements EntryClassInformationProvider {

    /** Fully-qualified name of the class providing the job's entry point. */
    private final String jobClassName;

    /**
     * Creates a {@code FromClasspathEntryClassInformationProvider} based on the passed job class
     * and classpath.
     *
     * @param jobClassName The job's class name.
     * @param classpath The classpath the job class should be part of.
     * @return The {@code FromClasspathEntryClassInformationProvider} instances collecting the
     *     necessary information.
     * @throws IOException If some Jar listed on the classpath wasn't accessible.
     * @throws FlinkException If the passed job class is not present on the passed classpath.
     */
    public static FromClasspathEntryClassInformationProvider create(
            String jobClassName, Iterable<URL> classpath) throws IOException, FlinkException {
        Preconditions.checkNotNull(jobClassName, "No job class name passed.");
        Preconditions.checkNotNull(classpath, "No classpath passed.");
        if (!userClasspathContainsJobClass(jobClassName, classpath)) {
            throw new FlinkException(
                    String.format(
                            "Could not find the provided job class (%s) in the user lib directory.",
                            jobClassName));
        }
        return new FromClasspathEntryClassInformationProvider(jobClassName);
    }

    /**
     * Creates a {@code FromClasspathEntryClassInformationProvider} looking for the entry class
     * providing the main method on the passed classpath.
     *
     * @param classpath The classpath the job class is expected to be part of.
     * @return The {@code FromClasspathEntryClassInformationProvider} providing the job class found
     *     on the passed classpath.
     * @throws IOException If some Jar listed on the classpath wasn't accessible.
     * @throws FlinkException Either no or too many main methods were found on the classpath.
     */
    public static FromClasspathEntryClassInformationProvider createFromClasspath(
            Iterable<URL> classpath) throws IOException, FlinkException {
        return new FromClasspathEntryClassInformationProvider(
                extractJobClassFromUrlClasspath(classpath));
    }

    /**
     * Creates a {@code FromClasspathEntryClassInformationProvider} looking for the entry class
     * providing the main method on the system classpath.
     *
     * @return The {@code FromClasspathEntryClassInformationProvider} providing the job class found
     *     on the system classpath.
     * @throws IOException If some Jar listed on the system classpath wasn't accessible.
     * @throws FlinkException Either no or too many main methods were found on the system classpath.
     */
    public static FromClasspathEntryClassInformationProvider createFromSystemClasspath()
            throws IOException, FlinkException {
        return new FromClasspathEntryClassInformationProvider(extractJobClassFromSystemClasspath());
    }

    /**
     * Creates a {@code FromClasspathEntryClassInformationProvider} assuming that the passed job
     * class is available on the system classpath. No verification is performed here; the class is
     * trusted to be present.
     *
     * @param jobClassName The job class name working as the entry point.
     * @return The {@code FromClasspathEntryClassInformationProvider} providing the job class found.
     */
    public static FromClasspathEntryClassInformationProvider
            createWithJobClassAssumingOnSystemClasspath(String jobClassName) {
        return new FromClasspathEntryClassInformationProvider(jobClassName);
    }

    private FromClasspathEntryClassInformationProvider(String jobClassName) {
        this.jobClassName = Preconditions.checkNotNull(jobClassName, "No job class name set.");
    }

    /**
     * Always returns an empty {@code Optional} because this implementation relies on the JAR
     * archive being available on either the user or the system classpath.
     *
     * @return An empty {@code Optional}.
     */
    @Override
    public Optional<File> getJarFile() {
        return Optional.empty();
    }

    /**
     * Returns the job class name if it could be derived from the specified classpath or was
     * explicitly specified.
     *
     * @return The job class name or an empty {@code Optional} if none was specified and it couldn't
     *     be derived from the classpath.
     */
    @Override
    public Optional<String> getJobClassName() {
        return Optional.of(jobClassName);
    }

    /**
     * Collects the JAR files listed on the JVM's {@code java.class.path}, skipping blank entries
     * and entries that are not existing regular files.
     */
    @VisibleForTesting
    static Iterable<File> extractSystemClasspath() {
        final String classpathPropertyValue = System.getProperty("java.class.path", "");
        final String pathSeparator = System.getProperty("path.separator", ":");
        return Arrays.stream(classpathPropertyValue.split(pathSeparator))
                .filter(entry -> !StringUtils.isNullOrWhitespaceOnly(entry))
                .map(File::new)
                .filter(File::isFile)
                .filter(f -> isJarFilename(f.getName()))
                .collect(Collectors.toList());
    }

    private static String extractJobClassFromSystemClasspath() throws FlinkException, IOException {
        return extractJobClassNameFromFileClasspath(extractSystemClasspath());
    }

    private static String extractJobClassFromUrlClasspath(Iterable<URL> classpath)
            throws IOException, FlinkException {
        final List<File> jarFilesFromClasspath =
                StreamSupport.stream(classpath.spliterator(), false)
                        .map(url -> new File(url.getFile()))
                        .filter(f -> isJarFilename(f.getName()))
                        .collect(Collectors.toList());
        return extractJobClassNameFromFileClasspath(jarFilesFromClasspath);
    }

    /**
     * Determines the single entry class among the passed JARs, translating the lookup failures of
     * {@code JarManifestParser} into user-facing {@code FlinkException}s.
     */
    private static String extractJobClassNameFromFileClasspath(Iterable<File> classpath)
            throws FlinkException, IOException {
        try {
            return JarManifestParser.findOnlyEntryClass(classpath).getEntryClass();
        } catch (NoSuchElementException e) {
            throw new FlinkException(
                    "No JAR found on classpath. Please provide a JAR explicitly.", e);
        } catch (IllegalArgumentException e) {
            throw new FlinkException(
                    "Multiple JAR archives with entry classes found on classpath. Please provide an entry class name.",
                    e);
        }
    }

    /** Returns {@code true} if any JAR on the passed classpath contains the job class. */
    private static boolean userClasspathContainsJobClass(
            String jobClassName, Iterable<URL> classpath) throws IOException {
        for (URL url : classpath) {
            if (!isJarFile(url)) {
                continue;
            }
            try (final JarFile jarFile = new JarFile(url.getFile())) {
                if (jarContainsJobClass(jobClassName, jarFile)) {
                    return true;
                }
            }
        }
        return false;
    }

    private static boolean jarContainsJobClass(String jobClassName, JarFile jarFile) {
        return jarFile.stream()
                .map(JarEntry::getName)
                .filter(fileName -> fileName.endsWith(FileUtils.CLASS_FILE_EXTENSION))
                .map(FileUtils::stripFileExtension)
                // JAR entry names always use '/' as the separator (per the JAR/ZIP spec),
                // independent of the platform. The previous use of File.separator broke
                // the lookup on Windows, where '\' never matches an entry name.
                .map(fileName -> fileName.replace("/", FileUtils.PACKAGE_SEPARATOR))
                .anyMatch(name -> name.equals(jobClassName));
    }

    private static boolean isJarFile(URL url) {
        return isJarFilename(url.getFile());
    }

    private static boolean isJarFilename(String filename) {
        return filename.endsWith(".jar");
    }
}
| |
//================================================================================
//Copyright (c) 2012, David Yu
//All rights reserved.
//--------------------------------------------------------------------------------
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// 3. Neither the name of protostuff nor the names of its contributors may be used
// to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//================================================================================
package com.dyuproject.protostuff.runtime;
import static com.dyuproject.protostuff.runtime.RuntimeFieldFactory.ID_THROWABLE;
import static com.dyuproject.protostuff.runtime.RuntimeFieldFactory.STR_THROWABLE;
import java.io.IOException;
import com.dyuproject.protostuff.GraphInput;
import com.dyuproject.protostuff.Input;
import com.dyuproject.protostuff.Output;
import com.dyuproject.protostuff.Pipe;
import com.dyuproject.protostuff.ProtostuffException;
import com.dyuproject.protostuff.Schema;
import com.dyuproject.protostuff.StatefulOutput;
/**
* Used when the type is assignable from {@link java.lang.Throwable}.
*
* @author David Yu
* @created May 2, 2012
*/
public abstract class PolymorphicThrowableSchema extends PolymorphicSchema
{

    /**
     * Reflective handle on {@link Throwable}'s private {@code cause} field, or {@code null} if it
     * could not be made accessible (e.g. under a restrictive security manager).
     */
    static final java.lang.reflect.Field __cause;
    static
    {
        java.lang.reflect.Field cause = null;
        try
        {
            cause = Throwable.class.getDeclaredField("cause");
            cause.setAccessible(true);
        }
        catch (Exception e)
        {
            // Fall back to null; cyclic-cause handling is then skipped.
            cause = null;
        }
        __cause = cause;
    }

    /** Maps the field number back to its wire name ({@code null} for unknown numbers). */
    static String name(int number)
    {
        return number == ID_THROWABLE ? STR_THROWABLE : null;
    }

    /** Maps the wire name back to its field number (0 for unknown names). */
    static int number(String name)
    {
        return name.length() == 1 && name.charAt(0) == 'Z' ? ID_THROWABLE : 0;
    }

    protected final Pipe.Schema<Object> pipeSchema = new Pipe.Schema<Object>(this)
    {
        protected void transfer(Pipe pipe, Input input, Output output) throws IOException
        {
            transferObject(this, pipe, input, output, strategy);
        }
    };

    public PolymorphicThrowableSchema(IdStrategy strategy)
    {
        super(strategy);
    }

    public Pipe.Schema<Object> getPipeSchema()
    {
        return pipeSchema;
    }

    public String getFieldName(int number)
    {
        return name(number);
    }

    public int getFieldNumber(String name)
    {
        return number(name);
    }

    public String messageFullName()
    {
        return Throwable.class.getName();
    }

    public String messageName()
    {
        return Throwable.class.getSimpleName();
    }

    public void mergeFrom(Input input, Object owner) throws IOException
    {
        setValue(readObjectFrom(input, this, owner, strategy), owner);
    }

    public void writeTo(Output output, Object value) throws IOException
    {
        writeObjectTo(output, value, this, strategy);
    }

    /**
     * Writes the throwable {@code value} using the schema derived from its runtime class,
     * omitting the {@code cause} field when it cyclically references the value itself.
     */
    @SuppressWarnings("unchecked")
    static void writeObjectTo(Output output, Object value, Schema<?> currentSchema,
            IdStrategy strategy) throws IOException
    {
        final Schema<Object> schema = strategy.writePojoIdTo(output, ID_THROWABLE,
                (Class<Object>)value.getClass());

        if(output instanceof StatefulOutput)
        {
            // update using the derived schema.
            ((StatefulOutput)output).updateLast(schema, currentSchema);
        }

        if(tryWriteWithoutCause(output, value, schema))
            return;

        schema.writeTo(output, value);
    }

    /**
     * Writes all fields except {@code cause} when that field references {@code value} itself
     * (Throwable's "uninitialized cause" convention), returning true if it did so.
     */
    static boolean tryWriteWithoutCause(Output output, Object value,
            Schema<Object> schema) throws IOException
    {
        if(schema instanceof MappedSchema && __cause != null)
        {
            // ignore the field "cause" if its references itself (cyclic)
            final MappedSchema<Object> ms = (MappedSchema<Object>)schema;
            if(ms.fields.length > 1 && ms.fields[1].name.equals("cause"))
            {
                final Object cause;
                try
                {
                    cause = __cause.get(value);
                }
                catch (IllegalArgumentException e)
                {
                    throw new RuntimeException(e);
                }
                catch (IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }

                if(cause == value)
                {
                    // its cyclic, skip the second field "cause"
                    ms.fields[0].writeTo(output, value);
                    for(int i = 2, len = ms.fields.length; i < len; i++)
                        ms.fields[i].writeTo(output, value);

                    return true;
                }
            }
        }

        return false;
    }

    static Object readObjectFrom(Input input, Schema<?> schema, Object owner,
            IdStrategy strategy) throws IOException
    {
        final int number = input.readFieldNumber(schema);
        if(number != ID_THROWABLE)
            throw new ProtostuffException("Corrupt input.");

        return readObjectFrom(input, schema, owner, strategy, number);
    }

    /**
     * Instantiates the concrete throwable resolved from {@code number} and merges the remaining
     * input into it, restoring the self-referencing {@code cause} that the writer skipped.
     */
    static Object readObjectFrom(Input input, Schema<?> schema, Object owner,
            IdStrategy strategy, int number) throws IOException
    {
        final Schema<Object> derivedSchema = strategy.resolvePojoFrom(
                input, number).getSchema();

        final Object pojo = derivedSchema.newMessage();

        if(input instanceof GraphInput)
        {
            // update the actual reference.
            ((GraphInput)input).updateLast(pojo, owner);
        }

        if(__cause != null)
        {
            final Object cause;
            try
            {
                cause = __cause.get(pojo);
            }
            catch (IllegalArgumentException e)
            {
                throw new RuntimeException(e);
            }
            catch (IllegalAccessException e)
            {
                throw new RuntimeException(e);
            }

            if(cause == null)
            {
                // was not written because it was cyclic (the writer skips a cause that
                // references the throwable itself), so restore the self-reference here
                // for correctness. Note: setting it to `cause` (null) would be a no-op.
                try
                {
                    __cause.set(pojo, pojo);
                }
                catch (IllegalArgumentException e)
                {
                    throw new RuntimeException(e);
                }
                catch (IllegalAccessException e)
                {
                    throw new RuntimeException(e);
                }
            }
        }

        derivedSchema.mergeFrom(input, pojo);
        return pojo;
    }

    static void transferObject(Pipe.Schema<Object> pipeSchema, Pipe pipe,
            Input input, Output output, IdStrategy strategy) throws IOException
    {
        final int number = input.readFieldNumber(pipeSchema.wrappedSchema);
        if(number != ID_THROWABLE)
            throw new ProtostuffException("Corrupt input.");

        transferObject(pipeSchema, pipe, input, output, strategy, number);
    }

    static void transferObject(Pipe.Schema<Object> pipeSchema, Pipe pipe,
            Input input, Output output, IdStrategy strategy, int number) throws IOException
    {
        final Pipe.Schema<Object> derivedPipeSchema = strategy.transferPojoId(
                input, output, number).getPipeSchema();

        if(output instanceof StatefulOutput)
        {
            // update using the derived schema.
            ((StatefulOutput)output).updateLast(derivedPipeSchema, pipeSchema);
        }

        Pipe.transferDirect(derivedPipeSchema, pipe, input, output);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.flowrun;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetricOperation;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext;
import org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.TimelineDocument;
import org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.entity.TimelineMetricSubDoc;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* This doc represents the flow run information for every job.
*/
public class FlowRunDocument implements TimelineDocument<FlowRunDocument> {

  private static final Logger LOG = LoggerFactory
      .getLogger(FlowRunDocument.class);

  private String id;
  private final String type = TimelineEntityType.YARN_FLOW_RUN.toString();
  private String clusterId;
  private String username;
  private String flowName;
  private Long flowRunId;
  private String flowVersion;
  private long minStartTime;
  private long maxEndTime;
  // Keyed by metric id; values are merged via aggregateMetrics() on upsert.
  private final Map<String, TimelineMetricSubDoc>
      metrics = new HashMap<>();

  public FlowRunDocument() {
  }

  public FlowRunDocument(TimelineCollectorContext collectorContext,
      Set<TimelineMetric> metrics) {
    this.clusterId = collectorContext.getClusterId();
    this.username = collectorContext.getUserId();
    this.flowName = collectorContext.getFlowName();
    this.flowRunId = collectorContext.getFlowRunId();
    transformMetrics(metrics);
  }

  /** Wraps each incoming metric into a sub-document, indexed by metric id. */
  private void transformMetrics(Set<TimelineMetric> timelineMetrics) {
    for (TimelineMetric metric : timelineMetrics) {
      TimelineMetricSubDoc metricSubDoc = new TimelineMetricSubDoc(metric);
      this.metrics.put(metric.getId(), metricSubDoc);
    }
  }

  /**
   * Merge the {@link FlowRunDocument} that is passed with the current
   * document for upsert. Scalar fields are overwritten by the incoming
   * document (timestamps only if positive); metrics are aggregated.
   *
   * @param flowRunDoc
   *          that has to be merged
   */
  @Override
  public void merge(FlowRunDocument flowRunDoc) {
    if (flowRunDoc.getMinStartTime() > 0) {
      this.minStartTime = flowRunDoc.getMinStartTime();
    }
    if (flowRunDoc.getMaxEndTime() > 0) {
      this.maxEndTime = flowRunDoc.getMaxEndTime();
    }
    this.clusterId = flowRunDoc.getClusterId();
    this.flowName = flowRunDoc.getFlowName();
    this.id = flowRunDoc.getId();
    this.username = flowRunDoc.getUsername();
    this.flowVersion = flowRunDoc.getFlowVersion();
    this.flowRunId = flowRunDoc.getFlowRunId();
    aggregateMetrics(flowRunDoc.getMetrics());
  }

  /**
   * Folds the incoming metric sub-documents into this document's metric map:
   * unseen metric ids are inserted as-is, known ids are combined via the
   * metric's real-time aggregation operation.
   */
  private void aggregateMetrics(
      Map<String, TimelineMetricSubDoc> metricSubDocMap) {
    for(Map.Entry<String, TimelineMetricSubDoc> metricEntry :
        metricSubDocMap.entrySet()) {
      final String metricId = metricEntry.getKey();
      final TimelineMetricSubDoc metricValue = metricEntry.getValue();
      if (this.metrics.containsKey(metricId)) {
        TimelineMetric incomingMetric =
            metricValue.fetchTimelineMetric();
        TimelineMetric baseMetric =
            this.metrics.get(metricId).fetchTimelineMetric();
        if (incomingMetric.getValues().size() > 0) {
          baseMetric = aggregate(incomingMetric, baseMetric);
          this.metrics.put(metricId, new TimelineMetricSubDoc(baseMetric));
        } else {
          LOG.debug("No incoming metric to aggregate for : {}",
              baseMetric.getId());
        }
      } else {
        this.metrics.put(metricId, metricValue);
      }
    }
  }

  /**
   * Applies the base metric's real-time aggregation operation (SUM, AVG, MAX
   * or REPLACE) to combine the incoming metric into the base metric.
   */
  private TimelineMetric aggregate(TimelineMetric incomingMetric,
      TimelineMetric baseMetric) {
    switch (baseMetric.getRealtimeAggregationOp()) {
    case SUM:
      baseMetric = TimelineMetricOperation.SUM
          .aggregate(incomingMetric, baseMetric, null);
      break;
    case AVG:
      baseMetric = TimelineMetricOperation.AVG
          .aggregate(incomingMetric, baseMetric, null);
      break;
    case MAX:
      baseMetric = TimelineMetricOperation.MAX
          .aggregate(incomingMetric, baseMetric, null);
      break;
    case REPLACE:
      baseMetric = TimelineMetricOperation.REPLACE
          .aggregate(incomingMetric, baseMetric, null);
      // This break was missing: the REPLACE case fell through into the
      // default branch and logged a spurious "Unknown TimelineMetricOperation"
      // warning on every REPLACE aggregation.
      break;
    default:
      LOG.warn("Unknown TimelineMetricOperation: {}",
          baseMetric.getRealtimeAggregationOp());
    }
    return baseMetric;
  }

  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  public String getClusterId() {
    return clusterId;
  }

  public void setClusterId(String clusterId) {
    this.clusterId = clusterId;
  }

  public String getUsername() {
    return username;
  }

  public void setUsername(String username) {
    this.username = username;
  }

  public String getFlowName() {
    return flowName;
  }

  public void setFlowName(String flowName) {
    this.flowName = flowName;
  }

  public Long getFlowRunId() {
    return flowRunId;
  }

  public void setFlowRunId(Long flowRunId) {
    this.flowRunId = flowRunId;
  }

  public Map<String, TimelineMetricSubDoc> getMetrics() {
    return metrics;
  }

  public void setMetrics(Map<String, TimelineMetricSubDoc> metrics) {
    // Additive on purpose: the backing map is final, so entries are copied in.
    this.metrics.putAll(metrics);
  }

  /** Unwraps the stored sub-documents back into plain timeline metrics. */
  public Set<TimelineMetric> fetchTimelineMetrics() {
    Set<TimelineMetric> metricSet = new HashSet<>();
    for(TimelineMetricSubDoc metricSubDoc : metrics.values()) {
      metricSet.add(metricSubDoc.fetchTimelineMetric());
    }
    return metricSet;
  }

  public long getMinStartTime() {
    return minStartTime;
  }

  public void setMinStartTime(long minStartTime) {
    this.minStartTime = minStartTime;
  }

  public long getMaxEndTime() {
    return maxEndTime;
  }

  public void setMaxEndTime(long maxEndTime) {
    this.maxEndTime = maxEndTime;
  }

  @Override
  public String getType() {
    return type;
  }

  @Override
  public long getCreatedTime() {
    return minStartTime;
  }

  @Override
  public void setCreatedTime(long createdTime) {
    // Creation time is the earliest observed start; only set it once.
    if(minStartTime == 0) {
      minStartTime = createdTime;
    }
  }

  public String getFlowVersion() {
    return flowVersion;
  }

  public void setFlowVersion(String flowVersion) {
    this.flowVersion = flowVersion;
  }
}
| |
package com.citelic.game.entity.player.content.actions.skills.magic;
import com.citelic.GameConstants;
import com.citelic.cache.impl.item.ItemDefinitions;
import com.citelic.game.engine.Engine;
import com.citelic.game.engine.task.EngineTask;
import com.citelic.game.engine.task.EngineTaskManager;
import com.citelic.game.entity.Animation;
import com.citelic.game.entity.Graphics;
import com.citelic.game.entity.player.Player;
import com.citelic.game.entity.player.content.actions.HomeTeleport;
import com.citelic.game.entity.player.content.actions.skills.Skills;
import com.citelic.game.entity.player.content.controllers.impl.distractions.clanwars.FfaZone;
import com.citelic.game.entity.player.content.controllers.impl.distractions.clanwars.RequestController;
import com.citelic.game.entity.player.content.controllers.impl.distractions.pvp.Wilderness;
import com.citelic.game.entity.player.item.Item;
import com.citelic.game.entity.player.item.ItemConstants;
import com.citelic.game.map.tile.Tile;
import com.citelic.networking.codec.decode.WorldPacketsDecoder;
import com.citelic.utility.Utilities;
/*
* content package used for static stuff
*/
public class Magic {
public static final int MAGIC_TELEPORT = 0, ITEM_TELEPORT = 1,
OBJECT_TELEPORT = 2;
@SuppressWarnings("unused")
private static final int AIR_RUNE = 556, WATER_RUNE = 555,
EARTH_RUNE = 557, FIRE_RUNE = 554, MIND_RUNE = 558,
NATURE_RUNE = 561, CHAOS_RUNE = 562, DEATH_RUNE = 560,
BLOOD_RUNE = 565, SOUL_RUNE = 566, ASTRAL_RUNE = 9075,
LAW_RUNE = 563, STEAM_RUNE = 4694, MIST_RUNE = 4695,
DUST_RUNE = 4696, SMOKE_RUNE = 4697, MUD_RUNE = 4698,
LAVA_RUNE = 4699, ARMADYL_RUNE = 21773;
private final static Tile[] TABS = { new Tile(3217, 3426, 0),
new Tile(3222, 3218, 0), new Tile(2965, 3379, 0),
new Tile(2758, 3478, 0), new Tile(2660, 3306, 0) };
public static final void castMiscellaneousSpell(Player player, int spellId,
int itemId) {
if (!ItemConstants.isTradeable(new Item(itemId))) {
player.getPackets().sendGameMessage(
"You cannot use this spell on an un-tradeable item.");
return;
}
switch (spellId) {
case 59:
Alching.castAlchemy(player, itemId, false);
break;
case 50:
Superheating.processSuperHeat(player, itemId);
break;
case 38:
Alching.castAlchemy(player, itemId, true);
break;
}
}
public static final boolean checkCombatSpell(Player player, int spellId,
int set, boolean delete) {
if (spellId == 65535)
return true;
switch (player.getCombatDefinitions().getSpellBook()) {
case 193:
switch (spellId) {
case 28:
if (!checkSpellRequirements(player, 50, delete, CHAOS_RUNE, 2,
DEATH_RUNE, 2, FIRE_RUNE, 1, AIR_RUNE, 1))
return false;
break;
case 32:
if (!checkSpellRequirements(player, 52, delete, CHAOS_RUNE, 2,
DEATH_RUNE, 2, AIR_RUNE, 1, SOUL_RUNE, 1))
return false;
break;
case 24:
if (!checkSpellRequirements(player, 56, delete, CHAOS_RUNE, 2,
DEATH_RUNE, 2, BLOOD_RUNE, 1))
return false;
break;
case 20:
if (!checkSpellRequirements(player, 58, delete, CHAOS_RUNE, 2,
DEATH_RUNE, 2, WATER_RUNE, 2))
return false;
break;
case 30:
if (!checkSpellRequirements(player, 62, delete, CHAOS_RUNE, 4,
DEATH_RUNE, 2, FIRE_RUNE, 2, AIR_RUNE, 2))
return false;
break;
case 34:
if (!checkSpellRequirements(player, 64, delete, CHAOS_RUNE, 4,
DEATH_RUNE, 2, AIR_RUNE, 1, SOUL_RUNE, 2))
return false;
break;
case 26:
if (!checkSpellRequirements(player, 68, delete, CHAOS_RUNE, 4,
DEATH_RUNE, 2, BLOOD_RUNE, 2))
return false;
break;
case 22:
if (!checkSpellRequirements(player, 70, delete, CHAOS_RUNE, 4,
DEATH_RUNE, 2, WATER_RUNE, 4))
return false;
break;
case 29:
if (!checkSpellRequirements(player, 74, delete, DEATH_RUNE, 2,
BLOOD_RUNE, 2, FIRE_RUNE, 2, AIR_RUNE, 2))
return false;
break;
case 33:
if (!checkSpellRequirements(player, 76, delete, DEATH_RUNE, 2,
BLOOD_RUNE, 2, AIR_RUNE, 2, SOUL_RUNE, 2))
return false;
break;
case 25:
if (!checkSpellRequirements(player, 80, delete, DEATH_RUNE, 2,
BLOOD_RUNE, 4))
return false;
break;
case 21:
if (!checkSpellRequirements(player, 82, delete, DEATH_RUNE, 2,
BLOOD_RUNE, 2, WATER_RUNE, 3))
return false;
break;
case 31:
if (!checkSpellRequirements(player, 86, delete, DEATH_RUNE, 4,
BLOOD_RUNE, 2, FIRE_RUNE, 4, AIR_RUNE, 4))
return false;
break;
case 35:
if (!checkSpellRequirements(player, 88, delete, DEATH_RUNE, 4,
BLOOD_RUNE, 2, AIR_RUNE, 4, SOUL_RUNE, 3))
return false;
break;
case 27:
if (!checkSpellRequirements(player, 92, delete, DEATH_RUNE, 4,
BLOOD_RUNE, 4, SOUL_RUNE, 1))
return false;
break;
case 23:
if (!checkSpellRequirements(player, 94, delete, DEATH_RUNE, 4,
BLOOD_RUNE, 2, WATER_RUNE, 6))
return false;
break;
case 36: // Miasmic rush.
if (!checkSpellRequirements(player, 61, delete, CHAOS_RUNE, 2,
EARTH_RUNE, 1, SOUL_RUNE, 1)) {
return false;
}
int weaponId = player.getEquipment().getWeaponId();
if (weaponId != 13867 && weaponId != 13869 && weaponId != 13941
&& weaponId != 13943) {
player.getPackets().sendGameMessage(
"You need a Zuriel's staff to cast this spell.");
player.getPackets()
.sendGameMessage(
"Extreme donators can cast Miasmic spells without Zuriel's staff.");
return false;
}
break;
case 38: // Miasmic burst.
if (!checkSpellRequirements(player, 73, delete, CHAOS_RUNE, 4,
EARTH_RUNE, 2, SOUL_RUNE, 2)) {
return false;
}
weaponId = player.getEquipment().getWeaponId();
if (weaponId != 13867 && weaponId != 13869 && weaponId != 13941
&& weaponId != 13943) {
player.getPackets().sendGameMessage(
"You need a Zuriel's staff to cast this spell.");
player.getPackets()
.sendGameMessage(
"Extreme donators can cast Miasmic spells without Zuriel's staff.");
return false;
}
break;
case 37: // Miasmic blitz.
if (!checkSpellRequirements(player, 85, delete, BLOOD_RUNE, 2,
EARTH_RUNE, 3, SOUL_RUNE, 3)) {
return false;
}
weaponId = player.getEquipment().getWeaponId();
if (weaponId != 13867 && weaponId != 13869 && weaponId != 13941
&& weaponId != 13943) {
player.getPackets().sendGameMessage(
"You need a Zuriel's staff to cast this spell.");
player.getPackets()
.sendGameMessage(
"Extreme donators can cast Miasmic spells without Zuriel's staff.");
return false;
}
break;
case 39: // Miasmic barrage.
if (!checkSpellRequirements(player, 97, delete, BLOOD_RUNE, 4,
EARTH_RUNE, 4, SOUL_RUNE, 4)) {
return false;
}
weaponId = player.getEquipment().getWeaponId();
if (weaponId != 13867 && weaponId != 13869 && weaponId != 13941
&& weaponId != 13943) {
player.getPackets().sendGameMessage(
"You need a Zuriel's staff to cast this spell.");
player.getPackets()
.sendGameMessage(
"Extreme donators can cast Miasmic spells without Zuriel's staff.");
return false;
}
break;
default:
return false;
}
break;
case 192:
switch (spellId) {
case 98:
if (!checkSpellRequirements(player, 1, delete, AIR_RUNE, 2))
return false;
break;
case 25:
if (!checkSpellRequirements(player, 1, delete, AIR_RUNE, 1,
MIND_RUNE, 1))
return false;
break;
case 28:
if (!checkSpellRequirements(player, 5, delete, WATER_RUNE, 1,
AIR_RUNE, 1, MIND_RUNE, 1))
return false;
break;
case 30:
if (!checkSpellRequirements(player, 9, delete, EARTH_RUNE, 2,
AIR_RUNE, 1, MIND_RUNE, 1))
return false;
break;
case 32:
if (!checkSpellRequirements(player, 13, delete, FIRE_RUNE, 3,
AIR_RUNE, 2, MIND_RUNE, 1))
return false;
break;
case 33:
if (!checkSpellRequirements(player, 15, delete, EARTH_RUNE, 2,
WATER_RUNE, 2, NATURE_RUNE, 1))
return false;
break;
case 65:
if (!checkSpellRequirements(player, 60, delete, EARTH_RUNE, 4,
WATER_RUNE, 4, NATURE_RUNE, 2))
return false;
break;
case 34: // air bolt
if (!checkSpellRequirements(player, 17, delete, AIR_RUNE, 2,
CHAOS_RUNE, 1))
return false;
break;
case 36:// bind
if (!checkSpellRequirements(player, 20, delete, EARTH_RUNE, 3,
WATER_RUNE, 3, NATURE_RUNE, 2))
return false;
break;
case 55: // snare
if (!checkSpellRequirements(player, 50, delete, EARTH_RUNE, 4,
WATER_RUNE, 4, NATURE_RUNE, 3))
return false;
break;
case 81:// entangle
if (!checkSpellRequirements(player, 79, delete, EARTH_RUNE, 5,
WATER_RUNE, 5, NATURE_RUNE, 4))
return false;
break;
case 39: // water bolt
if (!checkSpellRequirements(player, 23, delete, WATER_RUNE, 2,
AIR_RUNE, 2, CHAOS_RUNE, 1))
return false;
break;
case 42: // earth bolt
if (!checkSpellRequirements(player, 29, delete, EARTH_RUNE, 3,
AIR_RUNE, 2, CHAOS_RUNE, 1))
return false;
break;
case 45: // fire bolt
if (!checkSpellRequirements(player, 35, delete, FIRE_RUNE, 4,
AIR_RUNE, 3, CHAOS_RUNE, 1))
return false;
break;
case 49: // air blast
if (!checkSpellRequirements(player, 41, delete, AIR_RUNE, 3,
DEATH_RUNE, 1))
return false;
break;
case 52: // water blast
if (!checkSpellRequirements(player, 47, delete, WATER_RUNE, 3,
AIR_RUNE, 3, DEATH_RUNE, 1))
return false;
break;
case 58: // earth blast
if (!checkSpellRequirements(player, 53, delete, EARTH_RUNE, 4,
AIR_RUNE, 3, DEATH_RUNE, 1))
return false;
break;
case 63: // fire blast
if (!checkSpellRequirements(player, 59, delete, FIRE_RUNE, 5,
AIR_RUNE, 4, DEATH_RUNE, 1))
return false;
break;
case 70: // air wave
if (!checkSpellRequirements(player, 62, delete, AIR_RUNE, 5,
BLOOD_RUNE, 1))
return false;
break;
case 73: // water wave
if (!checkSpellRequirements(player, 65, delete, WATER_RUNE, 7,
AIR_RUNE, 5, BLOOD_RUNE, 1))
return false;
break;
case 77: // earth wave
if (!checkSpellRequirements(player, 70, delete, EARTH_RUNE, 7,
AIR_RUNE, 5, BLOOD_RUNE, 1))
return false;
break;
case 79: // teleother lumbridge
if (!checkSpellRequirements(player, 74, delete, SOUL_RUNE, 1,
LAW_RUNE, 1, EARTH_RUNE, 1))
return false;
break;
case 85: // teleother falador
if (!checkSpellRequirements(player, 82, delete, SOUL_RUNE, 1,
LAW_RUNE, 1, WATER_RUNE, 1))
return false;
break;
case 90: // teleother camelot
if (!checkSpellRequirements(player, 90, delete, SOUL_RUNE, 2,
LAW_RUNE, 1))
return false;
break;
case 80: // fire wave
if (!checkSpellRequirements(player, 75, delete, FIRE_RUNE, 7,
AIR_RUNE, 5, BLOOD_RUNE, 1))
return false;
break;
case 84:
if (!checkSpellRequirements(player, 81, delete, AIR_RUNE, 7,
DEATH_RUNE, 1, BLOOD_RUNE, 1))
return false;
break;
case 87:
if (!checkSpellRequirements(player, 85, delete, WATER_RUNE, 10,
AIR_RUNE, 7, DEATH_RUNE, 1, BLOOD_RUNE, 1))
return false;
break;
case 89:
if (!checkSpellRequirements(player, 85, delete, EARTH_RUNE, 10,
AIR_RUNE, 7, DEATH_RUNE, 1, BLOOD_RUNE, 1))
return false;
break;
case 66: // Sara Strike
if (player.getEquipment().getWeaponId() != 2415) {
player.getPackets()
.sendGameMessage(
"You need to be equipping a Saradomin staff to cast this spell.",
true);
return false;
}
if (!checkSpellRequirements(player, 60, delete, AIR_RUNE, 4,
FIRE_RUNE, 1, BLOOD_RUNE, 2))
return false;
break;
case 67: // Guthix Claws
if (player.getEquipment().getWeaponId() != 2416) {
player.getPackets()
.sendGameMessage(
"You need to be equipping a Guthix Staff or Void Mace to cast this spell.",
true);
return false;
}
if (!checkSpellRequirements(player, 60, delete, AIR_RUNE, 4,
FIRE_RUNE, 1, BLOOD_RUNE, 2))
return false;
break;
case 68: // Flame of Zammy
if (player.getEquipment().getWeaponId() != 2417) {
player.getPackets()
.sendGameMessage(
"You need to be equipping a Zamorak Staff to cast this spell.",
true);
return false;
}
if (!checkSpellRequirements(player, 60, delete, AIR_RUNE, 4,
FIRE_RUNE, 4, BLOOD_RUNE, 2))
return false;
break;
case 91:
if (!checkSpellRequirements(player, 85, delete, FIRE_RUNE, 10,
AIR_RUNE, 7, DEATH_RUNE, 1, BLOOD_RUNE, 1))
return false;
break;
case 86: // teleblock
if (!checkSpellRequirements(player, 85, delete, CHAOS_RUNE, 1,
LAW_RUNE, 1, DEATH_RUNE, 1))
return false;
break;
case 99: // Storm of Armadyl
if (!checkSpellRequirements(player, 77, delete, ARMADYL_RUNE, 1))
return false;
break;
default:
return false;
}
break;
default:
return false;
}
if (set >= 0) {
if (set == 0) {
player.getCombatDefinitions().setAutoCastSpell(spellId);
player.getPackets().sendGameMessage("Autocast spell selected.");
} else {
player.getTemporaryAttributtes().put("tempCastSpell", spellId);
}
}
return true;
}
/**
 * Checks that the player can pay the rune cost given as (runeId, amount)
 * pairs, and optionally deletes the runes from the inventory.
 * <p>
 * Runes supplied by an equipped elemental staff (see
 * {@link #hasInfiniteRunes(int, int, int)}) are never required nor deleted.
 * An equipped staff of light gives each required rune a 1-in-8 chance of
 * being kept; rune id 21773 is never kept by the staff passive.
 *
 * @param player the caster
 * @param delete whether to remove the runes on success
 * @param runes  alternating rune-id / amount pairs
 * @return {@code true} if the player has (and, if requested, paid) the cost
 */
public static final boolean checkRunes(Player player, boolean delete,
        int... runes) {
    int weaponId = player.getEquipment().getWeaponId();
    int shieldId = player.getEquipment().getShieldId();
    // Remember per pair which runes are free so the delete pass below
    // honours the same decisions. BUGFIX: previously the staff-of-light
    // save only skipped the possession check, while the delete pass still
    // consumed the runes - so the "1 in 8 chance of keeping runes" never
    // actually kept any.
    boolean[] free = new boolean[runes.length / 2];
    for (int index = 0; index + 1 < runes.length; index += 2) {
        int runeId = runes[index];
        int amount = runes[index + 1];
        if (hasInfiniteRunes(runeId, weaponId, shieldId)) {
            free[index / 2] = true;
            continue;
        }
        // Staff of light passive: 1-in-8 chance per rune of keeping it.
        if (hasStaffOfLight(weaponId) && Utilities.getRandom(8) == 0
                && runeId != 21773) {
            free[index / 2] = true;
            continue;
        }
        if (!player.getInventory().containsItem(runeId, amount)) {
            player.getPackets().sendGameMessage(
                    "You do not have enough "
                            + ItemDefinitions.getItemDefinitions(runeId)
                                    .getName().replace("rune", "Rune")
                            + "s to cast this spell.");
            return false;
        }
    }
    if (delete) {
        for (int index = 0; index + 1 < runes.length; index += 2) {
            if (free[index / 2])
                continue;
            player.getInventory().deleteItem(runes[index], runes[index + 1]);
        }
    }
    return true;
}
/**
 * Verifies the Magic level requirement and then the rune cost for a spell.
 *
 * @param player the caster
 * @param level  required Magic level (compared against level-for-xp)
 * @param delete whether runes are consumed on success
 * @param runes  alternating rune-id / amount pairs
 * @return {@code true} when both the level and rune checks pass
 */
public static final boolean checkSpellRequirements(Player player,
        int level, boolean delete, int... runes) {
    boolean hasLevel = player.getSkills().getLevelForXp(Skills.MAGIC) >= level;
    if (hasLevel)
        return checkRunes(player, delete, runes);
    player.getPackets().sendGameMessage(
            "Your Magic level is not high enough for this spell.");
    return false;
}
/**
 * Returns whether the equipped weapon/shield provides an unlimited supply
 * of the given elemental rune (elemental, battle, mystic and combination
 * staves; the tome shield 18346 for water runes).
 *
 * @param runeId   rune item id being checked
 * @param weaponId equipped weapon item id
 * @param shieldId equipped shield item id
 * @return {@code true} if the rune never needs to be paid
 */
public static final boolean hasInfiniteRunes(int runeId, int weaponId,
        int shieldId) {
    if (runeId == AIR_RUNE) {
        // air staves
        return weaponId == 1381 || weaponId == 21777 || weaponId == 1397
                || weaponId == 1405;
    } else if (runeId == WATER_RUNE) {
        // water staves (6562/6563 and 11736/11738 are combination staves)
        return weaponId == 1383 || shieldId == 18346 || weaponId == 1395
                || weaponId == 1403 || weaponId == 6562 || weaponId == 6563
                || weaponId == 11736 || weaponId == 11738;
    } else if (runeId == EARTH_RUNE) {
        // earth staves
        return weaponId == 1385 || weaponId == 1399 || weaponId == 1407
                || weaponId == 3053 || weaponId == 3054 || weaponId == 6562
                || weaponId == 6563;
    } else if (runeId == FIRE_RUNE) {
        // fire staves. BUGFIX: was "1387 || 1387" (duplicated id); 1393 is
        // the fire battlestaff, consistent with isWearingFireStaff below.
        return weaponId == 1387 || weaponId == 1393 || weaponId == 1401
                || weaponId == 3053 || weaponId == 3054
                || weaponId == 11736 || weaponId == 11738;
    }
    return false;
}
/**
 * Returns whether the weapon id is one of the staff of light variants
 * (15486 plus the recoloured 22207/22209/22211/22213 versions).
 */
public static boolean hasStaffOfLight(int weaponId) {
    switch (weaponId) {
    case 15486:
    case 22207:
    case 22209:
    case 22211:
    case 22213:
        return true;
    default:
        return false;
    }
}
/**
 * Returns whether the weapon id is a fire staff variant
 * (1387, 1393, 1401, 3053, 3054).
 */
static boolean isWearingFireStaff(int weaponId) {
    return weaponId == 1387 || weaponId == 1393 || weaponId == 1401
            || weaponId == 3053 || weaponId == 3054;
}
/**
 * Handles a click on the Ancient Magicks spellbook.
 * Combat spells select an (auto)cast spell; the remaining ids cast a
 * teleport immediately; id 48 is the home-teleport button with two
 * click options.
 *
 * @param player   the caster
 * @param spellId  component id of the clicked spell
 * @param packetId action-button packet id (distinguishes home-tele options)
 */
public static final void processAncientSpell(Player player, int spellId,
        int packetId) {
    player.stopAll(false);
    switch (spellId) {
    // Combat spells: all of these ids fall through to the same handler.
    case 28:
    case 32:
    case 24:
    case 20:
    case 30:
    case 34:
    case 26:
    case 22:
    case 29:
    case 33:
    case 25:
    case 21:
    case 31:
    case 35:
    case 27:
    case 23:
    case 36:
    case 37:
    case 38:
    case 39:
        setCombatSpell(player, spellId);
        break;
    // Ancient teleports: args are (required level, xp, destination, runes...).
    case 40:
        sendAncientTeleportSpell(player, 54, 64, new Tile(3099, 9882, 0),
                LAW_RUNE, 2, FIRE_RUNE, 1, AIR_RUNE, 1);
        break;
    case 41:
        sendAncientTeleportSpell(player, 60, 70, new Tile(3222, 3336, 0),
                LAW_RUNE, 2, SOUL_RUNE, 1);
        break;
    case 42:
        sendAncientTeleportSpell(player, 66, 76, new Tile(3492, 3471, 0),
                LAW_RUNE, 2, BLOOD_RUNE, 1);
        break;
    case 43:
        sendAncientTeleportSpell(player, 72, 82, new Tile(3006, 3471, 0),
                LAW_RUNE, 2, WATER_RUNE, 4);
        break;
    case 44:
        sendAncientTeleportSpell(player, 78, 88, new Tile(2990, 3696, 0),
                LAW_RUNE, 2, FIRE_RUNE, 3, AIR_RUNE, 2);
        break;
    case 45:
        sendAncientTeleportSpell(player, 84, 94, new Tile(3217, 3677, 0),
                LAW_RUNE, 2, SOUL_RUNE, 2);
        break;
    case 46:
        sendAncientTeleportSpell(player, 90, 100, new Tile(3288, 3886, 0),
                LAW_RUNE, 2, BLOOD_RUNE, 2);
        break;
    case 47:
        sendAncientTeleportSpell(player, 96, 106, new Tile(2977, 3873, 0),
                LAW_RUNE, 2, WATER_RUNE, 8);
        break;
    case 48: // home teleport button
        switch (packetId) {
        case WorldPacketsDecoder.ACTION_BUTTON1_PACKET:
            // first option: open the home-teleport interface
            useHomeTele(player);
            break;
        case WorldPacketsDecoder.ACTION_BUTTON2_PACKET:
            // second option: return to the last lodestone destination
            if (player.getLodeStoneTile() == null) {
                player.getPackets()
                        .sendGameMessage(
                                "You need to teleport somewhere before you can use this option.");
                return;
            }
            player.getActionManager().setAction(
                    new HomeTeleport(player.getLodeStoneTile()));
            break;
        }
        break;
    }
}
/**
 * Handles a click on the Lunar spellbook.
 * Currently implements Vengeance (37), Vengeance Group (74), the
 * home-teleport button (39) and spell id 42.
 *
 * @param player   the caster
 * @param spellId  component id of the clicked spell
 * @param packetId action-button packet id (distinguishes home-tele options)
 */
public static final void processLunarSpell(Player player, int spellId,
        int packetId) {
    player.stopAll(false);
    switch (spellId) {
    case 37: // Vengeance
        if (player.getSkills().getLevel(Skills.MAGIC) < 94) {
            player.getPackets().sendGameMessage(
                    "Your Magic level is not high enough for this spell.");
            return;
        } else if (player.getSkills().getLevel(Skills.DEFENCE) < 40) {
            player.getPackets().sendGameMessage(
                    "You need a Defence level of 40 for this spell");
            return;
        }
        // 30-second personal cooldown, tracked as a temporary attribute.
        Long lastVeng = (Long) player.getTemporaryAttributtes().get(
                "LAST_VENG");
        if (lastVeng != null
                && lastVeng + 30000 > Utilities.currentTimeMillis()) {
            player.getPackets()
                    .sendGameMessage(
                            "Players may only cast vengeance once every 30 seconds.");
            return;
        }
        if (!checkRunes(player, true, ASTRAL_RUNE, 4, DEATH_RUNE, 2,
                EARTH_RUNE, 10))
            return;
        player.setNextGraphics(new Graphics(726, 0, 100));
        player.setNextAnimation(new Animation(4410));
        player.setCastVeng(true);
        player.getTemporaryAttributtes().put("LAST_VENG",
                Utilities.currentTimeMillis());
        player.getPackets().sendGameMessage("You cast a vengeance.");
        break;
    case 74: // Vengeance Group
        Long lastVengGroup = (Long) player.getTemporaryAttributtes().get(
                "LAST_VENGGROUP");
        if (lastVengGroup != null
                && lastVengGroup + 30000 > Utilities.currentTimeMillis()) {
            player.getPackets()
                    .sendGameMessage(
                            "Players may only cast vengeance group once every 30 seconds.");
            return;
        }
        if (!player.isAtMultiArea()) {
            player.getPackets().sendGameMessage(
                    "You can only cast vengeance group in a multi area.");
            return;
        }
        if (player.getSkills().getLevel(Skills.MAGIC) < 95) {
            player.getPackets()
                    .sendGameMessage(
                            "You need a level of 95 magic to cast vengeance group.");
            return;
        }
        // BUGFIX: this previously used && so the cast was only rejected
        // when ALL three runes were missing (and the deletes below ran
        // anyway). The spell needs 3x 560, 11x 557 and 4x 9075, so missing
        // ANY of them must block the cast.
        if (!player.getInventory().containsItem(560, 3)
                || !player.getInventory().containsItem(557, 11)
                || !player.getInventory().containsItem(9075, 4)) {
            player.getPackets().sendGameMessage(
                    "You don't have enough runes to cast vengeance group.");
            return;
        }
        int count = 0;
        for (Player other : Engine.getPlayers()) {
            if (other.withinDistance(player, 4) && other.isAtMultiArea()) {
                other.getPackets()
                        .sendGameMessage(
                                "Someone cast the Group Vengeance spell and you were affected!");
                other.setCastVeng(true);
                other.setNextGraphics(new Graphics(725, 0, 100));
                // BUGFIX: the key previously contained a stray space
                // ("LAST_VENGGROU P"), so the cooldown check above never
                // saw it and affected players could re-cast immediately.
                other.getTemporaryAttributtes().put("LAST_VENGGROUP",
                        Utilities.currentTimeMillis());
                other.getTemporaryAttributtes().put("LAST_VENG",
                        Utilities.currentTimeMillis());
                count++;
            }
        }
        player.getPackets().sendGameMessage(
                "The spell affected " + count + " nearby people.");
        player.setNextGraphics(new Graphics(725, 0, 100));
        player.setNextAnimation(new Animation(4410));
        player.setCastVeng(true);
        player.getTemporaryAttributtes().put("LAST_VENGGROUP",
                Utilities.currentTimeMillis());
        player.getTemporaryAttributtes().put("LAST_VENG",
                Utilities.currentTimeMillis());
        player.getInventory().deleteItem(560, 3);
        player.getInventory().deleteItem(557, 11);
        player.getInventory().deleteItem(9075, 4);
        break;
    case 39: // home teleport button
        switch (packetId) {
        case WorldPacketsDecoder.ACTION_BUTTON1_PACKET:
            useHomeTele(player);
            break;
        case WorldPacketsDecoder.ACTION_BUTTON2_PACKET:
            if (player.getLodeStoneTile() == null) {
                player.getPackets()
                        .sendGameMessage(
                                "You need to teleport somewhere before you can use this option.");
                return;
            }
            player.getActionManager().setAction(
                    new HomeTeleport(player.getLodeStoneTile()));
            break;
        }
        break;
    case 42:
        if (player.getSkills().getLevel(Skills.MAGIC) < 93) {
            player.getPackets().sendGameMessage(
                    "Your Magic level is not high enough for this spell.");
            return;
        } else if (player.getSkills().getLevel(Skills.DEFENCE) < 40) {
            player.getPackets().sendGameMessage(
                    "You need a Defence level of 40 for this spell");
            return;
        }
        if (!checkRunes(player, true, EARTH_RUNE, 11, DEATH_RUNE, 3,
                ASTRAL_RUNE, 4))
            return;
        // NOTE(review): runes are consumed here but no spell effect is
        // applied - looks unfinished; confirm the intended behaviour.
        break;
    }
}
/**
 * Handles a click on the standard (modern) spellbook.
 * Combat spells select an (auto)cast spell; utility spells (bolt
 * enchanting, bones-to-bananas/peaches) run immediately; the remaining
 * ids cast teleports.
 *
 * @param player   the caster
 * @param spellId  component id of the clicked spell
 * @param packetId action-button packet id (distinguishes home-tele options)
 */
public static final void processNormalSpell(Player player, int spellId,
        int packetId) {
    player.stopAll(false);
    switch (spellId) {
    // Combat spells: all fall through to the same handler.
    case 98: // wind rush
    case 25: // air strike
    case 28: // water strike
    case 30: // earth strike
    case 32: // fire strike
    case 34: // air bolt
    case 39: // water bolt
    case 42: // earth bolt
    case 45: // fire bolt
    case 49: // air blast
    case 52: // water blast
    case 58: // earth blast
    case 63: // fire blast
    case 70: // air wave
    case 73: // water wave
    case 77: // earth wave
    case 80: // fire wave
    case 99:
    case 84:
    case 87:
    case 89:
    case 91:
    case 36:
    case 55:
    case 81:
    case 66:
    case 67:
    case 68:
        setCombatSpell(player, spellId);
        break;
    case 27: // crossbow bolt enchant
        if (player.getSkills().getLevel(Skills.MAGIC) < 4) {
            player.getPackets().sendGameMessage(
                    "Your Magic level is not high enough for this spell.");
            return;
        }
        player.stopAll();
        // 432 is the bolt-enchanting selection interface.
        player.getInterfaceManager().sendInterface(432);
        break;
    case 33:
        BonesToBananas.castBonesToBananas(player);
        break;
    case 65:
        BonesToPeaches.castBonesToPeaches(player);
        break;
    case 24: // home teleport button
        switch (packetId) {
        case WorldPacketsDecoder.ACTION_BUTTON1_PACKET:
            useHomeTele(player);
            break;
        case WorldPacketsDecoder.ACTION_BUTTON2_PACKET:
            if (player.getLodeStoneTile() == null) {
                player.getPackets()
                        .sendGameMessage(
                                "You need to teleport somewhere before you can use this option.");
                return;
            }
            player.getActionManager().setAction(
                    new HomeTeleport(player.getLodeStoneTile()));
            break;
        }
        break;
    // Teleports: args are (required level, xp, destination, runes...).
    case 37: // mobi
        sendNormalTeleportSpell(player, 10, 19, new Tile(2413, 2848, 0),
                LAW_RUNE, 1, WATER_RUNE, 1, AIR_RUNE, 1);
        break;
    case 40: // varrock
        sendNormalTeleportSpell(player, 25, 19, new Tile(3212, 3424, 0),
                FIRE_RUNE, 1, AIR_RUNE, 3, LAW_RUNE, 1);
        break;
    case 43: // lumby
        sendNormalTeleportSpell(player, 31, 41, new Tile(3222, 3218, 0),
                EARTH_RUNE, 1, AIR_RUNE, 3, LAW_RUNE, 1);
        break;
    case 46: // fally
        sendNormalTeleportSpell(player, 37, 48, new Tile(2964, 3379, 0),
                WATER_RUNE, 1, AIR_RUNE, 3, LAW_RUNE, 1);
        break;
    case 51: // camelot
        sendNormalTeleportSpell(player, 45, 55.5, new Tile(2757, 3478, 0),
                AIR_RUNE, 5, LAW_RUNE, 1);
        break;
    case 57: // ardy
        sendNormalTeleportSpell(player, 51, 61, new Tile(2664, 3305, 0),
                WATER_RUNE, 2, LAW_RUNE, 2);
        break;
    case 62: // watch
        sendNormalTeleportSpell(player, 58, 68, new Tile(2547, 3113, 2),
                EARTH_RUNE, 2, LAW_RUNE, 2);
        break;
    case 69: // troll
        sendNormalTeleportSpell(player, 61, 68, new Tile(2888, 3674, 0),
                FIRE_RUNE, 2, LAW_RUNE, 2);
        break;
    case 72: // ape (item 1963 = banana, also consumed)
        sendNormalTeleportSpell(player, 64, 76, new Tile(2776, 9103, 0),
                FIRE_RUNE, 2, WATER_RUNE, 2, LAW_RUNE, 2, 1963, 1);
        break;
    }
}
/**
 * Lever teleport with the default pull animation (2140) and no chat
 * messages; behaviour is otherwise identical to the message-aware overload.
 */
public static void pushLeverTeleport(final Player player, final Tile tile) {
    pushLeverTeleport(player, tile, 2140, null, null);
}
/**
 * Plays a lever-pull emote (plus optional start message), locks the player
 * for one tick, then performs an object teleport to {@code tile} and sends
 * the optional end message.
 *
 * @param player       the player pulling the lever
 * @param tile         teleport destination
 * @param emote        animation id to play before the teleport
 * @param startMessage filtered chat message sent before the pull, or null
 * @param endMessage   filtered chat message sent after arrival, or null
 */
public static void pushLeverTeleport(final Player player, final Tile tile,
        int emote, String startMessage, final String endMessage) {
    // Controllers (minigames etc.) may veto the teleport.
    if (!player.getControllerManager().processObjectTeleport(tile))
        return;
    player.setNextAnimation(new Animation(emote));
    if (startMessage != null)
        player.getPackets().sendGameMessage(startMessage, true);
    player.lock();
    // Delay the actual teleport by one game tick so the emote is visible.
    EngineTaskManager.schedule(new EngineTask() {
        @Override
        public void run() {
            player.unlock();
            Magic.sendObjectTeleportSpell(player, false, tile);
            if (endMessage != null)
                player.getPackets().sendGameMessage(endMessage, true);
        }
    }, 1);
}
/**
 * Casts an Ancient Magicks teleport: ancient emote/graphic (1979/1681),
 * no landing emote/graphic, 5-tick delay, randomized landing tile.
 */
public static final void sendAncientTeleportSpell(Player player, int level,
        double xp, Tile tile, int... runes) {
    sendTeleportSpell(player, 1979, -1, 1681, -1, level, xp, tile, 5, true,
            MAGIC_TELEPORT, runes);
}
/**
 * Object-triggered teleport using the modern teleport emotes/graphics
 * (8939/8941, 1576/1577) with a caller-supplied tick delay; no level or
 * rune requirements.
 */
public static final void sendDelayedObjectTeleportSpell(Player player,
        int delay, boolean randomize, Tile tile) {
    sendTeleportSpell(player, 8939, 8941, 1576, 1577, 0, 0, tile, delay,
            randomize, OBJECT_TELEPORT);
}
/**
 * Item-triggered teleport with caller-supplied cast emote/graphic; the
 * down-emote value -2 makes sendTeleportSpell reset the animation on
 * landing. Returns whether the teleport was started.
 */
public static final boolean sendItemTeleportSpell(Player player,
        boolean randomize, int upEmoteId, int upGraphicId, int delay,
        Tile tile) {
    return sendTeleportSpell(player, upEmoteId, -2, upGraphicId, -1, 0, 0,
            tile, delay, randomize, ITEM_TELEPORT);
}
/**
 * Casts a modern-spellbook teleport: standard emotes/graphics
 * (8939/8941, 1576/1577), 3-tick delay, randomized landing tile.
 */
public static final void sendNormalTeleportSpell(Player player, int level,
        double xp, Tile tile, int... runes) {
    sendTeleportSpell(player, 8939, 8941, 1576, 1577, level, xp, tile, 3,
            true, MAGIC_TELEPORT, runes);
}
/**
 * Teleother-style teleport: accept emote 1816 with graphic 342 on cast,
 * standard landing emote, 3-tick delay, randomized landing tile.
 */
public static final void sendTeleotherTeleportSpell(Player player,
        int level, double xp, Tile tile, int... runes) {
    sendTeleportSpell(player, 1816, 8941, 342, -1, level, xp, tile, 3,
            true, MAGIC_TELEPORT, runes);
}
/**
 * Object-triggered teleport using the standard emotes/graphics with the
 * default 3-tick delay; no level or rune requirements.
 */
public static final void sendObjectTeleportSpell(Player player,
        boolean randomize, Tile tile) {
    sendTeleportSpell(player, 8939, 8941, 1576, 1577, 0, 0, tile, 3,
            randomize, OBJECT_TELEPORT);
}
/**
 * Core teleport routine used by every teleport wrapper above.
 * Validates lock state, a blocked coordinate region, Magic level and rune
 * cost, routes the request through the matching controller hook, then
 * plays the cast emote/graphic and schedules the actual tile change.
 *
 * @param player        the caster
 * @param upEmoteId     cast animation id, or -1 for none
 * @param downEmoteId   landing animation id; -1 = none, -2 = reset animation
 * @param upGraphicId   cast graphic id, or -1 for none
 * @param downGraphicId landing graphic id, or -1 for none
 * @param level         required Magic level (0 = none)
 * @param xp            Magic xp awarded on arrival (0 = none)
 * @param tile          destination tile
 * @param delay         ticks before the tile change happens
 * @param randomize     whether to scatter the landing tile within a small area
 * @param teleType      MAGIC_TELEPORT, ITEM_TELEPORT or OBJECT_TELEPORT
 * @param runes         alternating rune-id / amount pairs to consume
 * @return {@code true} if the teleport was started
 */
public static final boolean sendTeleportSpell(final Player player,
        int upEmoteId, final int downEmoteId, int upGraphicId,
        final int downGraphicId, int level, final double xp,
        final Tile tile, int delay, final boolean randomize,
        final int teleType, int... runes) {
    long currentTime = Utilities.currentTimeMillis();
    if (player.getLockDelay() > currentTime)
        return false;
    // Teleport-blocked coordinate boxes.
    // NOTE(review): presumably instanced/boss areas - confirm which zones
    // these rectangles correspond to.
    if (player.getX() >= 2956
            && player.getX() <= 3067
            && player.getY() >= 5512
            && player.getY() <= 5630
            || (player.getX() >= 2756 && player.getX() <= 2875
                    && player.getY() >= 5512 && player.getY() <= 5627)) {
        player.getPackets().sendGameMessage(
                "A magical force is blocking you from teleporting.");
        return false;
    }
    if (player.getSkills().getLevel(Skills.MAGIC) < level) {
        player.getPackets().sendGameMessage(
                "Your Magic level is not high enough for this spell.");
        return false;
    }
    // First pass only verifies possession; deletion happens after the
    // controller has approved the teleport (see checkRunes(true) below).
    if (!checkRunes(player, false, runes))
        return false;
    if (teleType == MAGIC_TELEPORT) {
        if (!player.getControllerManager().processMagicTeleport(tile))
            return false;
    } else if (teleType == ITEM_TELEPORT) {
        if (!player.getControllerManager().processItemTeleport(tile))
            return false;
    } else if (teleType == OBJECT_TELEPORT) {
        if (!player.getControllerManager().processObjectTeleport(tile))
            return false;
    }
    checkRunes(player, true, runes);
    player.stopAll();
    if (upEmoteId != -1)
        player.setNextAnimation(new Animation(upEmoteId));
    if (upGraphicId != -1)
        player.setNextGraphics(new Graphics(upGraphicId));
    if (teleType == MAGIC_TELEPORT)
        player.getPackets().sendSound(5527, 0, 2);
    player.lock(3 + delay);
    // Period-0 task: first run (after 'delay' ticks) performs the tile
    // change; the next tick's run resets received damage and stops.
    EngineTaskManager.schedule(new EngineTask() {
        boolean removeDamage;
        @Override
        public void run() {
            if (!removeDamage) {
                Tile teleTile = tile;
                if (randomize) {
                    // attempts to randomize tile within a small area,
                    // retrying until a walkable tile is found
                    for (int trycount = 0; trycount < 10; trycount++) {
                        teleTile = new Tile(tile, 2);
                        if (Engine.canMoveNPC(tile.getZ(), teleTile.getX(),
                                teleTile.getY(), player.getSize()))
                            break;
                        teleTile = tile;
                    }
                }
                player.setNextTile(teleTile);
                player.getControllerManager().magicTeleported(teleType);
                if (player.getControllerManager().getController() == null)
                    teleControlersCheck(player, teleTile);
                if (xp != 0)
                    player.getSkills().addXp(Skills.MAGIC, xp);
                if (downEmoteId != -1)
                    player.setNextAnimation(new Animation(
                            downEmoteId == -2 ? -1 : downEmoteId));
                if (downGraphicId != -1)
                    player.setNextGraphics(new Graphics(downGraphicId));
                if (teleType == MAGIC_TELEPORT) {
                    player.getPackets().sendSound(5524, 0, 2);
                    player.setNextFaceTile(new Tile(teleTile.getX(),
                            teleTile.getY() - 1, teleTile.getZ()));
                    player.setDirection(6);
                }
                removeDamage = true;
            } else {
                player.resetReceivedDamage();
                stop();
            }
        }
    }, delay, 0);
    return true;
}
/**
 * Toggles an (auto)cast combat spell: re-selecting the currently active
 * spell clears it, otherwise the spell is validated and selected.
 */
public static final void setCombatSpell(Player player, int spellId) {
    boolean alreadySelected = player.getCombatDefinitions()
            .getAutoCastSpell() == spellId;
    if (alreadySelected)
        player.getCombatDefinitions().resetSpells(true);
    else
        checkCombatSpell(player, spellId, 0, false);
}
/**
 * Starts the area controller matching the arrival tile, if any.
 * Checks are evaluated in priority order; only the first match starts.
 */
public static void teleControlersCheck(Player player, Tile teleTile) {
    int regionId = player.getRegionId();
    String controllerName = null;
    if (regionId == 11601)
        controllerName = "GodWars";
    else if (regionId == 13626 || regionId == 13625)
        controllerName = "DungeoneeringLobby";
    else if (Wilderness.isAtWild(teleTile))
        controllerName = "Wilderness";
    else if (RequestController.inWarRequest(player))
        controllerName = "clan_wars_request";
    else if (FfaZone.inArea(player))
        controllerName = "clan_wars_ffa";
    if (controllerName != null)
        player.getControllerManager().startController(controllerName);
}
/**
 * Opens the home-teleport / lodestone interface (1092) after stopping the
 * player's current activity.
 */
private static void useHomeTele(Player player) {
    player.stopAll();
    player.getInterfaceManager().sendInterface(1092);
}
/**
 * Handles a teleport-tab item click. Item 8013 sends the player to the
 * respawn location; items 8007..8007+TABS.length-1 map directly into the
 * TABS destination table. The tab is only consumed when the teleport
 * actually starts.
 *
 * @return {@code true} if the item id was recognised as a teleport tab
 */
public static boolean useTabTeleport(final Player player, final int itemId) {
    if (itemId == 8013) {
        if (useTeleTab(player, GameConstants.RESPAWN_PLAYER_LOCATION))
            player.getInventory().deleteItem(itemId, 1);
        return true;
    }
    int tabIndex = itemId - 8007;
    if (tabIndex < 0 || tabIndex >= TABS.length)
        return false;
    if (useTeleTab(player, TABS[tabIndex]))
        player.getInventory().deleteItem(itemId, 1);
    return true;
}
/**
 * Performs the break-tab teleport sequence: break animation/graphic, then
 * a three-stage tick task (finish break emote, change tile, unlock).
 *
 * @param player the player breaking the tab
 * @param tile   destination tile
 * @return {@code true} if the teleport was started (controller approved it)
 */
public static boolean useTeleTab(final Player player, final Tile tile) {
    if (!player.getControllerManager().processItemTeleport(tile))
        return false;
    player.lock();
    player.setNextAnimation(new Animation(9597));
    player.setNextGraphics(new Graphics(1680));
    // Runs every tick starting after 2 ticks; 'stage' drives the sequence.
    EngineTaskManager.schedule(new EngineTask() {
        int stage;
        @Override
        public void run() {
            if (stage == 0) {
                player.setNextAnimation(new Animation(4731));
                stage = 1;
            } else if (stage == 1) {
                Tile teleTile = tile;
                // attempts to randomize the landing tile within a small
                // area, retrying until a walkable tile is found
                for (int trycount = 0; trycount < 10; trycount++) {
                    teleTile = new Tile(tile, 2);
                    if (Engine.canMoveNPC(tile.getZ(), teleTile.getX(),
                            teleTile.getY(), player.getSize()))
                        break;
                    teleTile = tile;
                }
                player.setNextTile(teleTile);
                player.getControllerManager()
                        .magicTeleported(ITEM_TELEPORT);
                if (player.getControllerManager().getController() == null)
                    teleControlersCheck(player, teleTile);
                player.setNextFaceTile(new Tile(teleTile.getX(), teleTile
                        .getY() - 1, teleTile.getZ()));
                player.setDirection(6);
                player.setNextAnimation(new Animation(-1));
                stage = 2;
            } else if (stage == 2) {
                player.resetReceivedDamage();
                player.unlock();
                stop();
            }
        }
    }, 2, 1);
    return true;
}
/**
 * Spirit-tree style teleport: plays the enter animation/graphic, then
 * moves the player to (x, y, plane) after 3 ticks.
 *
 * @param player the player teleporting
 * @param x      destination x coordinate
 * @param y      destination y coordinate
 * @param plane  destination plane
 */
public static void vineTeleport(final Player player, final int x,
        final int y, final int plane) {
    player.lock(4);
    player.stopAll(true);
    player.setNextGraphics(new Graphics(1229));
    player.setNextAnimation(new Animation(7082));
    player.sendMessage("You feel at one with the spirit tree.");
    EngineTaskManager.schedule(new EngineTask() {
        @Override
        public void run() {
            player.setNextAnimation(new Animation(7084));
            player.setNextTile(new Tile(x, y, plane));
            player.checkMovement(x, y, plane);
            player.stopAll(true);
            stop();
        }
    }, 3);
}
// Utility class - private constructor prevents instantiation.
private Magic() {
}
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver12;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * OpenFlow 1.2 wire representation of an ERROR message with error type
 * PORT_MOD_FAILED. Immutable value class; construct via {@link Builder}
 * or {@link #createBuilder()}.
 * <p>
 * Generated by LoxiGen - do not hand-edit; regenerate from the template
 * instead.
 */
class OFPortModFailedErrorMsgVer12 implements OFPortModFailedErrorMsg {
    private static final Logger logger = LoggerFactory.getLogger(OFPortModFailedErrorMsgVer12.class);
    // version: 1.2
    final static byte WIRE_VERSION = 3;
    final static int MINIMUM_LENGTH = 12;

    private final static long DEFAULT_XID = 0x0L;
    private final static OFErrorCauseData DEFAULT_DATA = OFErrorCauseData.NONE;

    // OF message fields
    private final long xid;
    private final OFPortModFailedCode code;
    private final OFErrorCauseData data;
    //

    // package private constructor - used by readers, builders, and factory
    OFPortModFailedErrorMsgVer12(long xid, OFPortModFailedCode code, OFErrorCauseData data) {
        if(code == null) {
            throw new NullPointerException("OFPortModFailedErrorMsgVer12: property code cannot be null");
        }
        if(data == null) {
            throw new NullPointerException("OFPortModFailedErrorMsgVer12: property data cannot be null");
        }
        this.xid = xid;
        this.code = code;
        this.data = data;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_12;
    }

    @Override
    public OFType getType() {
        return OFType.ERROR;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFErrorType getErrType() {
        return OFErrorType.PORT_MOD_FAILED;
    }

    @Override
    public OFPortModFailedCode getCode() {
        return code;
    }

    @Override
    public OFErrorCauseData getData() {
        return data;
    }

    // Builder seeded with this message's field values.
    public OFPortModFailedErrorMsg.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to a parent message for unset fields.
    static class BuilderWithParent implements OFPortModFailedErrorMsg.Builder {
        final OFPortModFailedErrorMsgVer12 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean codeSet;
        private OFPortModFailedCode code;
        private boolean dataSet;
        private OFErrorCauseData data;

        BuilderWithParent(OFPortModFailedErrorMsgVer12 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_12;
        }

        @Override
        public OFType getType() {
            return OFType.ERROR;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFPortModFailedErrorMsg.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFErrorType getErrType() {
            return OFErrorType.PORT_MOD_FAILED;
        }

        @Override
        public OFPortModFailedCode getCode() {
            return code;
        }

        @Override
        public OFPortModFailedErrorMsg.Builder setCode(OFPortModFailedCode code) {
            this.code = code;
            this.codeSet = true;
            return this;
        }

        @Override
        public OFErrorCauseData getData() {
            return data;
        }

        @Override
        public OFPortModFailedErrorMsg.Builder setData(OFErrorCauseData data) {
            this.data = data;
            this.dataSet = true;
            return this;
        }

        @Override
        public OFPortModFailedErrorMsg build() {
            // Unset fields inherit the parent message's values.
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            OFPortModFailedCode code = this.codeSet ? this.code : parentMessage.code;
            if(code == null)
                throw new NullPointerException("Property code must not be null");
            OFErrorCauseData data = this.dataSet ? this.data : parentMessage.data;
            if(data == null)
                throw new NullPointerException("Property data must not be null");
            //
            return new OFPortModFailedErrorMsgVer12(
                    xid,
                    code,
                    data
                );
        }
    }

    // Builder that falls back to the class defaults for unset fields.
    static class Builder implements OFPortModFailedErrorMsg.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean codeSet;
        private OFPortModFailedCode code;
        private boolean dataSet;
        private OFErrorCauseData data;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_12;
        }

        @Override
        public OFType getType() {
            return OFType.ERROR;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFPortModFailedErrorMsg.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFErrorType getErrType() {
            return OFErrorType.PORT_MOD_FAILED;
        }

        @Override
        public OFPortModFailedCode getCode() {
            return code;
        }

        @Override
        public OFPortModFailedErrorMsg.Builder setCode(OFPortModFailedCode code) {
            this.code = code;
            this.codeSet = true;
            return this;
        }

        @Override
        public OFErrorCauseData getData() {
            return data;
        }

        @Override
        public OFPortModFailedErrorMsg.Builder setData(OFErrorCauseData data) {
            this.data = data;
            this.dataSet = true;
            return this;
        }

        //
        @Override
        public OFPortModFailedErrorMsg build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            // code has no default and must be set explicitly.
            if(!this.codeSet)
                throw new IllegalStateException("Property code doesn't have default value -- must be set");
            if(code == null)
                throw new NullPointerException("Property code must not be null");
            OFErrorCauseData data = this.dataSet ? this.data : DEFAULT_DATA;
            if(data == null)
                throw new NullPointerException("Property data must not be null");
            return new OFPortModFailedErrorMsgVer12(
                    xid,
                    code,
                    data
                );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes a message from the wire; returns null if the buffer does
    // not yet contain the full message (reader index is then restored).
    static class Reader implements OFMessageReader<OFPortModFailedErrorMsg> {
        @Override
        public OFPortModFailedErrorMsg readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 3
            byte version = bb.readByte();
            if(version != (byte) 0x3)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_12(3), got="+version);
            // fixed value property type == 1
            byte type = bb.readByte();
            if(type != (byte) 0x1)
                throw new OFParseError("Wrong type: Expected=OFType.ERROR(1), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property errType == 7
            short errType = bb.readShort();
            if(errType != (short) 0x7)
                throw new OFParseError("Wrong errType: Expected=OFErrorType.PORT_MOD_FAILED(7), got="+errType);
            OFPortModFailedCode code = OFPortModFailedCodeSerializerVer12.readFrom(bb);
            // cause data consumes the remainder of the message
            OFErrorCauseData data = OFErrorCauseData.read(bb, length - (bb.readerIndex() - start), OFVersion.OF_12);

            OFPortModFailedErrorMsgVer12 portModFailedErrorMsgVer12 = new OFPortModFailedErrorMsgVer12(
                    xid,
                    code,
                    data
                );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", portModFailedErrorMsgVer12);
            return portModFailedErrorMsgVer12;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFPortModFailedErrorMsgVer12Funnel FUNNEL = new OFPortModFailedErrorMsgVer12Funnel();

    // Guava Funnel: feeds all fields except the length into a PrimitiveSink.
    static class OFPortModFailedErrorMsgVer12Funnel implements Funnel<OFPortModFailedErrorMsgVer12> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFPortModFailedErrorMsgVer12 message, PrimitiveSink sink) {
            // fixed value property version = 3
            sink.putByte((byte) 0x3);
            // fixed value property type = 1
            sink.putByte((byte) 0x1);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property errType = 7
            sink.putShort((short) 0x7);
            OFPortModFailedCodeSerializerVer12.putTo(message.code, sink);
            message.data.putTo(sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializes a message to the wire, back-patching the length field.
    static class Writer implements OFMessageWriter<OFPortModFailedErrorMsgVer12> {
        @Override
        public void write(ChannelBuffer bb, OFPortModFailedErrorMsgVer12 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 3
            bb.writeByte((byte) 0x3);
            // fixed value property type = 1
            bb.writeByte((byte) 0x1);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            // fixed value property errType = 7
            bb.writeShort((short) 0x7);
            OFPortModFailedCodeSerializerVer12.writeTo(bb, message.code);
            message.data.writeTo(bb);

            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFPortModFailedErrorMsgVer12(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("code=").append(code);
        b.append(", ");
        b.append("data=").append(data);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFPortModFailedErrorMsgVer12 other = (OFPortModFailedErrorMsgVer12) obj;

        if( xid != other.xid)
            return false;
        if (code == null) {
            if (other.code != null)
                return false;
        } else if (!code.equals(other.code))
            return false;
        if (data == null) {
            if (other.data != null)
                return false;
        } else if (!data.equals(other.data))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // NOTE(review): the xid line omits the running result (usual form is
        // prime * result + ...). Harmless for the hashCode contract, but if
        // it needs changing, regenerate with LoxiGen rather than hand-edit.
        result = prime * (int) (xid ^ (xid >>> 32));
        result = prime * result + ((code == null) ? 0 : code.hashCode());
        result = prime * result + ((data == null) ? 0 : data.hashCode());
        return result;
    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.remoting.netty;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.channel.Channel;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.epoll.Epoll;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.epoll.EpollServerSocketChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.timeout.IdleState;
import io.netty.handler.timeout.IdleStateEvent;
import io.netty.handler.timeout.IdleStateHandler;
import io.netty.util.concurrent.DefaultEventExecutorGroup;
import java.net.InetSocketAddress;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.rocketmq.remoting.ChannelEventListener;
import org.apache.rocketmq.remoting.InvokeCallback;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.remoting.RemotingServer;
import org.apache.rocketmq.remoting.common.Pair;
import org.apache.rocketmq.remoting.common.RemotingHelper;
import org.apache.rocketmq.remoting.common.RemotingUtil;
import org.apache.rocketmq.remoting.exception.RemotingSendRequestException;
import org.apache.rocketmq.remoting.exception.RemotingTimeoutException;
import org.apache.rocketmq.remoting.exception.RemotingTooMuchRequestException;
import org.apache.rocketmq.remoting.protocol.RemotingCommand;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class NettyRemotingServer extends NettyRemotingAbstract implements RemotingServer {
    private static final Logger log = LoggerFactory.getLogger(RemotingHelper.ROCKETMQ_REMOTING);
    // Netty bootstrap plus its boss (accept) and selector (I/O) loops.
    private final ServerBootstrap serverBootstrap;
    private final EventLoopGroup eventLoopGroupSelector;
    private final EventLoopGroup eventLoopGroupBoss;
    private final NettyServerConfig nettyServerConfig;
    // Fallback executor for request callbacks (sized in the constructor).
    private final ExecutorService publicExecutor;
    private final ChannelEventListener channelEventListener;
    // Daemon timer; name suggests channel housekeeping - scheduled elsewhere.
    private final Timer timer = new Timer("ServerHouseKeepingService", true);
    private DefaultEventExecutorGroup defaultEventExecutorGroup;
    private RPCHook rpcHook;
    // Listen port; 0 until the server is started and bound.
    private int port = 0;
public NettyRemotingServer(final NettyServerConfig nettyServerConfig) {
this(nettyServerConfig, null);
}
public NettyRemotingServer(final NettyServerConfig nettyServerConfig,
final ChannelEventListener channelEventListener) {
super(nettyServerConfig.getServerOnewaySemaphoreValue(), nettyServerConfig.getServerAsyncSemaphoreValue());
this.serverBootstrap = new ServerBootstrap();
this.nettyServerConfig = nettyServerConfig;
this.channelEventListener = channelEventListener;
int publicThreadNums = nettyServerConfig.getServerCallbackExecutorThreads();
if (publicThreadNums <= 0) {
publicThreadNums = 4;
}
this.publicExecutor = Executors.newFixedThreadPool(publicThreadNums, new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
return new Thread(r, "NettyServerPublicExecutor_" + this.threadIndex.incrementAndGet());
}
});
this.eventLoopGroupBoss = new NioEventLoopGroup(1, new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
return new Thread(r, String.format("NettyBoss_%d", this.threadIndex.incrementAndGet()));
}
});
if (useEpoll()) {
this.eventLoopGroupSelector = new EpollEventLoopGroup(nettyServerConfig.getServerSelectorThreads(), new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
private int threadTotal = nettyServerConfig.getServerSelectorThreads();
@Override
public Thread newThread(Runnable r) {
return new Thread(r, String.format("NettyServerEPOLLSelector_%d_%d", threadTotal, this.threadIndex.incrementAndGet()));
}
});
} else {
this.eventLoopGroupSelector = new NioEventLoopGroup(nettyServerConfig.getServerSelectorThreads(), new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
private int threadTotal = nettyServerConfig.getServerSelectorThreads();
@Override
public Thread newThread(Runnable r) {
return new Thread(r, String.format("NettyServerNIOSelector_%d_%d", threadTotal, this.threadIndex.incrementAndGet()));
}
});
}
}
private boolean useEpoll() {
return RemotingUtil.isLinuxPlatform()
&& nettyServerConfig.isUseEpollNativeSelector()
&& Epoll.isAvailable();
}
@Override
public void start() {
this.defaultEventExecutorGroup = new DefaultEventExecutorGroup(
nettyServerConfig.getServerWorkerThreads(),
new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
return new Thread(r, "NettyServerCodecThread_" + this.threadIndex.incrementAndGet());
}
});
ServerBootstrap childHandler =
this.serverBootstrap.group(this.eventLoopGroupBoss, this.eventLoopGroupSelector)
.channel(useEpoll() ? EpollServerSocketChannel.class : NioServerSocketChannel.class)
.option(ChannelOption.SO_BACKLOG, 1024)
.option(ChannelOption.SO_REUSEADDR, true)
.option(ChannelOption.SO_KEEPALIVE, false)
.childOption(ChannelOption.TCP_NODELAY, true)
.childOption(ChannelOption.SO_SNDBUF, nettyServerConfig.getServerSocketSndBufSize())
.childOption(ChannelOption.SO_RCVBUF, nettyServerConfig.getServerSocketRcvBufSize())
.localAddress(new InetSocketAddress(this.nettyServerConfig.getListenPort()))
.childHandler(new ChannelInitializer<SocketChannel>() {
@Override
public void initChannel(SocketChannel ch) throws Exception {
ch.pipeline().addLast(
defaultEventExecutorGroup,
new NettyEncoder(),
new NettyDecoder(),
new IdleStateHandler(0, 0, nettyServerConfig.getServerChannelMaxIdleTimeSeconds()),
new NettyConnectManageHandler(),
new NettyServerHandler());
}
});
if (nettyServerConfig.isServerPooledByteBufAllocatorEnable()) {
childHandler.childOption(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT);
}
try {
ChannelFuture sync = this.serverBootstrap.bind().sync();
InetSocketAddress addr = (InetSocketAddress) sync.channel().localAddress();
this.port = addr.getPort();
} catch (InterruptedException e1) {
throw new RuntimeException("this.serverBootstrap.bind().sync() InterruptedException", e1);
}
if (this.channelEventListener != null) {
this.nettyEventExecutor.start();
}
this.timer.scheduleAtFixedRate(new TimerTask() {
@Override
public void run() {
try {
NettyRemotingServer.this.scanResponseTable();
} catch (Throwable e) {
log.error("scanResponseTable exception", e);
}
}
}, 1000 * 3, 1000);
}
@Override
public void shutdown() {
try {
if (this.timer != null) {
this.timer.cancel();
}
this.eventLoopGroupBoss.shutdownGracefully();
this.eventLoopGroupSelector.shutdownGracefully();
if (this.nettyEventExecutor != null) {
this.nettyEventExecutor.shutdown();
}
if (this.defaultEventExecutorGroup != null) {
this.defaultEventExecutorGroup.shutdownGracefully();
}
} catch (Exception e) {
log.error("NettyRemotingServer shutdown exception, ", e);
}
if (this.publicExecutor != null) {
try {
this.publicExecutor.shutdown();
} catch (Exception e) {
log.error("NettyRemotingServer shutdown exception, ", e);
}
}
}
@Override
public void registerRPCHook(RPCHook rpcHook) {
this.rpcHook = rpcHook;
}
@Override
public void registerProcessor(int requestCode, NettyRequestProcessor processor, ExecutorService executor) {
ExecutorService executorThis = executor;
if (null == executor) {
executorThis = this.publicExecutor;
}
Pair<NettyRequestProcessor, ExecutorService> pair = new Pair<NettyRequestProcessor, ExecutorService>(processor, executorThis);
this.processorTable.put(requestCode, pair);
}
@Override
public void registerDefaultProcessor(NettyRequestProcessor processor, ExecutorService executor) {
this.defaultRequestProcessor = new Pair<NettyRequestProcessor, ExecutorService>(processor, executor);
}
@Override
public int localListenPort() {
return this.port;
}
@Override
public Pair<NettyRequestProcessor, ExecutorService> getProcessorPair(int requestCode) {
return processorTable.get(requestCode);
}
@Override
public RemotingCommand invokeSync(final Channel channel, final RemotingCommand request, final long timeoutMillis)
throws InterruptedException, RemotingSendRequestException, RemotingTimeoutException {
return this.invokeSyncImpl(channel, request, timeoutMillis);
}
@Override
public void invokeAsync(Channel channel, RemotingCommand request, long timeoutMillis, InvokeCallback invokeCallback)
throws InterruptedException, RemotingTooMuchRequestException, RemotingTimeoutException, RemotingSendRequestException {
this.invokeAsyncImpl(channel, request, timeoutMillis, invokeCallback);
}
@Override
public void invokeOneway(Channel channel, RemotingCommand request, long timeoutMillis) throws InterruptedException,
RemotingTooMuchRequestException, RemotingTimeoutException, RemotingSendRequestException {
this.invokeOnewayImpl(channel, request, timeoutMillis);
}
@Override
public ChannelEventListener getChannelEventListener() {
return channelEventListener;
}
@Override
public RPCHook getRPCHook() {
return this.rpcHook;
}
@Override
public ExecutorService getCallbackExecutor() {
return this.publicExecutor;
}
class NettyServerHandler extends SimpleChannelInboundHandler<RemotingCommand> {
@Override
protected void channelRead0(ChannelHandlerContext ctx, RemotingCommand msg) throws Exception {
processMessageReceived(ctx, msg);
}
}
class NettyConnectManageHandler extends ChannelDuplexHandler {
@Override
public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.info("NETTY SERVER PIPELINE: channelRegistered {}", remoteAddress);
super.channelRegistered(ctx);
}
@Override
public void channelUnregistered(ChannelHandlerContext ctx) throws Exception {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.info("NETTY SERVER PIPELINE: channelUnregistered, the channel[{}]", remoteAddress);
super.channelUnregistered(ctx);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.info("NETTY SERVER PIPELINE: channelActive, the channel[{}]", remoteAddress);
super.channelActive(ctx);
if (NettyRemotingServer.this.channelEventListener != null) {
NettyRemotingServer.this.putNettyEvent(new NettyEvent(NettyEventType.CONNECT, remoteAddress, ctx.channel()));
}
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.info("NETTY SERVER PIPELINE: channelInactive, the channel[{}]", remoteAddress);
super.channelInactive(ctx);
if (NettyRemotingServer.this.channelEventListener != null) {
NettyRemotingServer.this.putNettyEvent(new NettyEvent(NettyEventType.CLOSE, remoteAddress, ctx.channel()));
}
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof IdleStateEvent) {
IdleStateEvent event = (IdleStateEvent) evt;
if (event.state().equals(IdleState.ALL_IDLE)) {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.warn("NETTY SERVER PIPELINE: IDLE exception [{}]", remoteAddress);
RemotingUtil.closeChannel(ctx.channel());
if (NettyRemotingServer.this.channelEventListener != null) {
NettyRemotingServer.this
.putNettyEvent(new NettyEvent(NettyEventType.IDLE, remoteAddress, ctx.channel()));
}
}
}
ctx.fireUserEventTriggered(evt);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
final String remoteAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
log.warn("NETTY SERVER PIPELINE: exceptionCaught {}", remoteAddress);
log.warn("NETTY SERVER PIPELINE: exceptionCaught exception.", cause);
if (NettyRemotingServer.this.channelEventListener != null) {
NettyRemotingServer.this.putNettyEvent(new NettyEvent(NettyEventType.EXCEPTION, remoteAddress, ctx.channel()));
}
RemotingUtil.closeChannel(ctx.channel());
}
}
}
| |
package main;
import game.Player;
import host.connection_listener;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.URL;
import java.util.ArrayList;
import javax.swing.JFrame;
import javax.swing.WindowConstants;
import GUI.GUI;
/**
 * Entry point of a peer-to-peer chat client. The user either hosts a chat room
 * (starts a server socket plus connection listener, then connects to itself) or
 * connects to an existing host. Chat input typed into the GUI is routed through
 * {@link #handle_GUI_input(String)}.
 *
 * Most state is shared via public static fields because other classes in the
 * project (GUI, listener threads) read and write them directly.
 */
public class p2p_user {
    // Fixed TCP port used both for hosting and for connecting to a host.
    private static final int PORT = 8888;
    public static String HOST = "";
    public static String BACKUP_HOST="";
    // "h" when hosting, "c" when connecting; "" while the user has not answered yet.
    public static String hosting="";
    private static Object LOCK = new Object();//lock to wait on user's response
    public static Socket clientsocket;
    // Display name used as the chat handle; changed via /nick.
    public static String name="undefined";
    // Main-loop flag: set to false by /exit to terminate the program loop.
    private static boolean connecting=true;
    public static volatile boolean connected=false;
    // This user's RSA key pair; other_users_public_keys holds keys received from peers.
    public static RSA Users_RSA= new RSA(1024);
    public static ArrayList<RSA> other_users_public_keys=new ArrayList<RSA>();
    // Names whose messages are hidden locally (see /block and /unblock).
    public static ArrayList<String> blacklist=new ArrayList<String>();
    private static int height=450;
    private static int width=450;
    //other classes need the gui's settext method
    public static GUI gui=new GUI(height,width);
    private static JFrame f = new JFrame("Chat Room");
    public static volatile Player p=new Player();
    public static ArrayList<String> connectedUsers=new ArrayList<String>();

    /**
     * Shows the chat window, then loops asking the user whether to host or connect
     * until a connection is established. The loop waits on LOCK for the GUI thread
     * (handle_GUI_input) to fill in 'hosting' / 'HOST' and notify.
     */
    public static void main(String[] args) {
        f.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
        // NOTE(review): JFrame.setSize takes (width, height); the arguments are
        // swapped here, but both values are 450 so it currently makes no difference.
        f.setSize(height,width);
        f.add(gui);
        f.pack();
        f.setVisible(true);
        // NOTE(review): this overrides the DO_NOTHING_ON_CLOSE set above — confirm
        // which close behavior is actually intended.
        f.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        // NOTE(review): once connected this loop spins without sleeping until /exit
        // sets connecting=false — confirm whether that is acceptable.
        while(connecting){
            if(!connected){
                gui.resetConnectedUsers();
                //starting up for the first time, dont know host
                if(HOST.equals("")){
                    gui.set_text("Are you connecting to a server or hosting new one? (C:connecting,H:hosting");
                    //block to wait for user's response; handle_GUI_input sets 'hosting' and notifies
                    synchronized (LOCK) {
                        while (hosting.equals("")) {
                            try { LOCK.wait(); }
                            catch (InterruptedException e) {
                                // treat interrupt as exit request
                                break;
                            }
                        }
                    }
                }
                if(hosting.equals("h")){
                    //if hosting the server, make a connection listener
                    try {
                        //start server
                        ServerSocket server=new ServerSocket(PORT);
                        //start the listener for connecting clients
                        Thread connection_listener_thread = new Thread(new connection_listener(server));
                        connection_listener_thread.start();
                    } catch (IOException e) {
                        System.out.println("error creating server");
                    }
                    try {
                        //connect to your own server
                        // The external service returns this machine's public IP as a single plain-text line.
                        BufferedReader in = new BufferedReader(new InputStreamReader(new URL("http://checkip.amazonaws.com").openStream()));
                        String ip = in.readLine(); //you get the IP as a String
                        gui.set_text(InetAddress.getLocalHost().getHostAddress()+" is your local ip, "+ip+" is your remote ip");
                        clientsocket = new Socket(InetAddress.getLocalHost().getHostAddress(), PORT);
                    } catch (IOException e) {
                        e.printStackTrace();
                        System.out.println("error connecting to server you host");
                    }
                    connected=true;
                }
                else if(hosting.equals("c")){
                    //starting up for the first time, dont know host. This is bypassed for reconnect
                    if(HOST.equals("")){
                        gui.set_text("Please enter the host's ip");
                        //block to wait for user's response (an ip-looking string sets HOST)
                        synchronized (LOCK) {
                            while (HOST.equals("")) {
                                try { LOCK.wait(); }
                                catch (InterruptedException e) {
                                    // treat interrupt as exit request
                                    break;
                                }
                            }
                        }
                    }
                    try {
                        gui.set_text("Establishing connection...");
                        clientsocket = new Socket(HOST, PORT);
                        connected=true;
                    } catch (IOException e) {
                        // Reset both prompts so the user is asked again from scratch.
                        gui.set_text("Error connecting to server. The address may be invalid or the remote ip may be firewalled");
                        HOST="";
                        hosting="";
                    }
                }
                if(connected){
                    //make sure to listen to your own socket.
                    Thread reciver_thread = new Thread(new listener_receiver());
                    reciver_thread.start();
                    //tell server your local ip
                    try {
                        String ip=InetAddress.getLocalHost().getHostAddress();
                        new PrintWriter(clientsocket.getOutputStream(), true).println("User ip="+ip);
                    } catch (IOException e) {
                        gui.set_text("Unable to inform host of ip. You cannot become an emergency host.");
                    }
                    gui.set_text("You are connected! Type /help for a list of commands.");
                }
            }
        }
    }

    /**
     * Handles one line of input typed into the GUI.
     *
     * Before a connection exists the input answers the host/connect prompts (and
     * notifies the main loop waiting on LOCK). Once connected, slash commands are
     * interpreted locally and everything else is sent to the server as a chat line.
     *
     * @param users_input the raw line the user typed.
     */
    public static void handle_GUI_input(String users_input){
        //non chat commands, program managment user side
        if(!connected){
            if(users_input.toLowerCase().equals("h")){
                hosting="h";
            }
            else if(users_input.toLowerCase().equals("c")){
                hosting="c";
            }
            // Anything made only of digits and dots is taken as the host ip.
            else if(users_input.matches("[0-9\\.]+")){
                HOST=users_input;
            }
            //unblock the main loop waiting on LOCK
            synchronized (LOCK) {
                LOCK.notifyAll();
            }
        }
        else{
            //for user to exit (others can see)
            if(users_input.equals("/exit")){
                connecting=false;
                connected=false;
                try{
                    new PrintWriter(clientsocket.getOutputStream(), true).println("<"+name+">"+" : "+users_input+" ["+p.curxp()+"]");
                }catch(IOException u){
                    gui.set_text("ERROR: unable to alert others to exit");
                }
                try {
                    clientsocket.close();
                } catch (IOException e) {
                    e.printStackTrace();
                    gui.set_text("ERROR: Could not exit");
                }
                gui.closeGUI();
                f.dispose();
            }
            //see commands (others can't see)
            // NOTE(review): the help text mentions /request, but no /request branch
            // exists in this method — presumably handled elsewhere; confirm.
            else if(users_input.equals("/help")){
                gui.set_text("Type '/exit' to exit");
                gui.set_text("Type '/nick NEWNAME' to change name.");
                gui.set_text("Type '/request USERSNAME key' to be able a private message to a user.");
                gui.set_text("Type '/dm USERSNAME m:MESSAGETEXT' to send a private message to a user you have a key from.");
                gui.set_text("Type '/block USERNAME' to not see DM's and messages from this user");
                gui.set_text("Type '/unblock USERNAME' to unblock a user");
            }
            //for user to change name (others can see)
            // NOTE(review): substring(6) assumes the input is exactly "/nick NAME";
            // a bare "/nick" would throw — confirm the GUI prevents that.
            else if(users_input.startsWith("/nick")){
                String newname=users_input.substring(6);
                if(!connectedUsers.contains(newname)){
                    try{
                        new PrintWriter(clientsocket.getOutputStream(), true).println(name+" is now called " + newname);
                    }catch(IOException u){
                        u.printStackTrace();
                        gui.set_text("ERROR: unable to alert others to name change");
                    }
                    name=users_input.substring(6);
                    gui.set_text("You are now called "+name);
                }else{
                    gui.set_text("A user with that name already exists.");
                }
            }
            //for user to send a dm to a user using their public key (others can only see encrypted)
            else if(users_input.toLowerCase().matches("\\/dm (.*) m\\:(.*)")){
                //since a dm is supposed to be private, try to be forgiving if user fudges command
                String dm_message=users_input.substring(users_input.toLowerCase().indexOf("m:")+2);
                String username=users_input.substring(4,users_input.toLowerCase().indexOf(" m:"));
                boolean founduser=false;
                // Look up the recipient's public key among the keys received so far.
                for(RSA user:other_users_public_keys){
                    if(user.name().equals(username)){
                        founduser=true;
                        try{
                            new PrintWriter(clientsocket.getOutputStream(), true).println("<"+name+">"+" : " +
                                "DM-"+username+
                                " m-"+user.Encrypt(dm_message));
                            gui.set_text("Sucessfully send dm message:" +dm_message+" to "+username);
                        }catch(IOException u){
                            gui.set_text("ERROR: unable to send DM");
                        }
                    }
                }
                if(!founduser){
                    gui.set_text("You do not have the key for user " + username + ". Request it and retry your message.");
                }
            }
            //NOTE: i know a user can just change their nick, but this is supposed to be an anonymous chat,
            //so i can't block a different way. Besides, if the user doesn't know they're blocked, this works.
            //add user to block list (not seen)
            else if(users_input.startsWith("/block")){
                blacklist.add(users_input.substring(7));
                gui.set_text("Blocked " + users_input.substring(7));
            }
            //add user to unblock list (not seen)
            else if(users_input.startsWith("/unblock")){
                String user=users_input.substring(9);
                if(blacklist.contains(user)){
                    blacklist.remove(user);
                    gui.set_text("Unblocked " + user);
                }
                else{
                    gui.set_text("User " +user+ " not in list of blocked users");
                }
            }
            //GAME ABILTIES — each is gated on the player actually owning the ability
            //and on its cooldown (see checkCooldown).
            else if(users_input.startsWith("/kick") && p.hasAbility("kick")){
                checkCooldown("kick",users_input);
            }
            else if(users_input.startsWith("/disable") && p.hasAbility("disable")){
                checkCooldown("disable",users_input);
            }
            else if(users_input.startsWith("/scramble") && p.hasAbility("scramble")){
                checkCooldown("scramble",users_input);
            }
            else if(users_input.startsWith("/forceblock") && p.hasAbility("forceblock")){
                checkCooldown("forceblock",users_input);
            }
            else if(users_input.startsWith("/viewall") && p.hasAbility("viewall")){
                // Toggle: viewall is a flag other code reads, not a message to send.
                p.viewall=!p.viewall;
            }
            else if(users_input.startsWith("/mimic") && p.hasAbility("mimic")){
                int cooltimeleft=p.cooltimeleft("mimic");
                if(cooltimeleft<=0){
                    try{
                        // Sends the message under the mimicked user's name:
                        // "/mimic VICTIM text" -> "<VICTIM> : text".
                        new PrintWriter(clientsocket.getOutputStream(), true).println("<"+users_input.substring(7,users_input.indexOf(" ",8))+">"+" : "+users_input.substring(users_input.indexOf(" ",8)+1));
                    }catch(IOException u){
                        gui.set_text("ERROR: Unable to send.");
                    }
                    gui.set_text(users_input);
                    p.cooldown("mimic");
                }else{
                    gui.set_text("Ability on cooldown, "+cooltimeleft+" seconds remaining.");
                }
            }
            //anything not specifically caught by commands
            else{
                //write to the socket's output stream and the server picks it up
                try{
                    new PrintWriter(clientsocket.getOutputStream(), true).println("<"+name+">"+" : "+users_input);
                }catch(IOException u){
                    gui.set_text("ERROR: Unable to send.");
                }
            }
        }
    }

    /**
     * Sends an ability command if the ability is off cooldown, then starts the
     * cooldown; otherwise reports the remaining cooldown time to the user.
     *
     * @param string the ability name (cooldown key).
     * @param users_input the full command line to broadcast.
     */
    private static void checkCooldown(String string,String users_input) {
        int cooltimeleft=p.cooltimeleft(string);
        if(cooltimeleft<=0){
            try {
                new PrintWriter(clientsocket.getOutputStream(), true).println("<"+name+">"+" : "+users_input);
            } catch (IOException e) {
                gui.set_text("ERROR: Unable to send.");
            }
            gui.set_text(users_input);
            p.cooldown(string);
        }else{
            gui.set_text("Ability on cooldown, "+cooltimeleft+" seconds remaining.");
        }
    }
}
| |
package com.supertoastsdemo;
import android.content.Intent;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.Spinner;
import com.actionbarsherlock.app.SherlockFragment;
import com.github.johnpersano.supertoasts.SuperToast;
/**
 * Demo fragment that builds and shows a {@link SuperToast} configured from the
 * animation/duration/background/text-size options the user picked in the
 * fragment's spinners, plus an optional icon controlled by a checkbox.
 */
public class FragmentSuperToast extends SherlockFragment {

    Spinner mAnimationSpinner;
    Spinner mDurationSpinner;
    Spinner mBackgroundSpinner;
    Spinner mTextsizeSpinner;
    CheckBox mImageCheckBox;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {

        // Inflate the layout and look up the option widgets.
        final View view = inflater.inflate(R.layout.fragment_supertoast,
            container, false);

        mAnimationSpinner = (Spinner) view.findViewById(R.id.animationSpinner);
        mDurationSpinner = (Spinner) view.findViewById(R.id.durationSpinner);
        mBackgroundSpinner = (Spinner) view.findViewById(R.id.backgroundSpinner);
        mTextsizeSpinner = (Spinner) view.findViewById(R.id.textsizeSpinner);
        mImageCheckBox = (CheckBox) view.findViewById(R.id.imageCheckBox);

        final Button showButton = (Button) view.findViewById(R.id.showButton);

        // A tap shows a toast built from the current widget state.
        showButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View tappedView) {

                showSuperToast();
            }
        });

        // A long press opens the second demo activity instead.
        showButton.setOnLongClickListener(new View.OnLongClickListener() {

            @Override
            public boolean onLongClick(View pressedView) {

                startActivity(new Intent(getActivity(), ActivityTwo.class));

                return false;
            }
        });

        return view;
    }

    /** Builds a SuperToast from the currently selected options and shows it. */
    private void showSuperToast() {

        final SuperToast superToast = new SuperToast(getActivity());

        applyAnimation(superToast, mAnimationSpinner.getSelectedItemPosition());
        applyDuration(superToast, mDurationSpinner.getSelectedItemPosition());
        applyBackground(superToast, mBackgroundSpinner.getSelectedItemPosition());
        applyTextSize(superToast, mTextsizeSpinner.getSelectedItemPosition());

        if (mImageCheckBox.isChecked()) {

            superToast.setIcon(R.drawable.icon_message, SuperToast.IconPosition.LEFT);
        }

        superToast.show();
    }

    // Maps the animation spinner position to a toast animation; other positions are ignored.
    private static void applyAnimation(SuperToast toast, int position) {

        if (position == 0) {
            toast.setAnimations(SuperToast.Animations.FADE);
        } else if (position == 1) {
            toast.setAnimations(SuperToast.Animations.FLYIN);
        } else if (position == 2) {
            toast.setAnimations(SuperToast.Animations.POPUP);
        } else if (position == 3) {
            toast.setAnimations(SuperToast.Animations.SCALE);
        }
    }

    // Maps the duration spinner position to a toast duration; other positions are ignored.
    private static void applyDuration(SuperToast toast, int position) {

        if (position == 0) {
            toast.setDuration(SuperToast.Duration.SHORT);
        } else if (position == 1) {
            toast.setDuration(SuperToast.Duration.MEDIUM);
        } else if (position == 2) {
            toast.setDuration(SuperToast.Duration.LONG);
        }
    }

    // Maps the background spinner position to a toast background; other positions are ignored.
    private static void applyBackground(SuperToast toast, int position) {

        if (position == 0) {
            toast.setBackground(SuperToast.Background.BLACK);
        } else if (position == 1) {
            toast.setBackground(SuperToast.Background.GRAY);
        } else if (position == 2) {
            toast.setBackground(SuperToast.Background.GREEN);
        } else if (position == 3) {
            toast.setBackground(SuperToast.Background.BLUE);
        } else if (position == 4) {
            toast.setBackground(SuperToast.Background.RED);
        } else if (position == 5) {
            toast.setBackground(SuperToast.Background.PURPLE);
        } else if (position == 6) {
            toast.setBackground(SuperToast.Background.ORANGE);
        }
    }

    // Maps the text-size spinner position to a toast text size; other positions are ignored.
    private static void applyTextSize(SuperToast toast, int position) {

        if (position == 0) {
            toast.setTextSize(SuperToast.TextSize.SMALL);
        } else if (position == 1) {
            toast.setTextSize(SuperToast.TextSize.MEDIUM);
        } else if (position == 2) {
            toast.setTextSize(SuperToast.TextSize.LARGE);
        }
    }
}
| |
package org.ggp.base.util.prover.aima.substituter;
import java.util.ArrayList;
import java.util.List;
import org.ggp.base.util.gdl.grammar.GdlConstant;
import org.ggp.base.util.gdl.grammar.GdlDistinct;
import org.ggp.base.util.gdl.grammar.GdlFunction;
import org.ggp.base.util.gdl.grammar.GdlLiteral;
import org.ggp.base.util.gdl.grammar.GdlNot;
import org.ggp.base.util.gdl.grammar.GdlOr;
import org.ggp.base.util.gdl.grammar.GdlPool;
import org.ggp.base.util.gdl.grammar.GdlProposition;
import org.ggp.base.util.gdl.grammar.GdlRelation;
import org.ggp.base.util.gdl.grammar.GdlRule;
import org.ggp.base.util.gdl.grammar.GdlSentence;
import org.ggp.base.util.gdl.grammar.GdlTerm;
import org.ggp.base.util.gdl.grammar.GdlVariable;
import org.ggp.base.util.prover.aima.substitution.Substitution;
/**
 * Applies {@link Substitution}s to GDL expressions, replacing every bound
 * variable with the term it maps to. Ground (variable-free) subexpressions are
 * returned unchanged; rebuilt expressions are obtained from {@link GdlPool}.
 */
public final class Substituter
{
    /** Applies {@code theta} to a literal and returns the substituted literal. */
    public static GdlLiteral substitute(GdlLiteral literal, Substitution theta)
    {
        return substituteLiteral(literal, theta);
    }

    /** Applies {@code theta} to a sentence and returns the substituted sentence. */
    public static GdlSentence substitute(GdlSentence sentence, Substitution theta)
    {
        return substituteSentence(sentence, theta);
    }

    /** Applies {@code theta} to a rule (its head and every body literal). */
    public static GdlRule substitute(GdlRule rule, Substitution theta)
    {
        return substituteRule(rule, theta);
    }

    // Constants never contain variables, so substitution is the identity.
    private static GdlConstant substituteConstant(GdlConstant constant, Substitution theta)
    {
        return constant;
    }

    private static GdlDistinct substituteDistinct(GdlDistinct distinct, Substitution theta)
    {
        if (distinct.isGround()) {
            return distinct;
        }
        GdlTerm left = substituteTerm(distinct.getArg1(), theta);
        GdlTerm right = substituteTerm(distinct.getArg2(), theta);
        return GdlPool.getDistinct(left, right);
    }

    private static GdlFunction substituteFunction(GdlFunction function, Substitution theta)
    {
        if (function.isGround()) {
            return function;
        }
        GdlConstant functionName = substituteConstant(function.getName(), theta);
        List<GdlTerm> newArguments = new ArrayList<GdlTerm>(function.arity());
        for (int argIndex = 0; argIndex < function.arity(); argIndex++) {
            newArguments.add(substituteTerm(function.get(argIndex), theta));
        }
        return GdlPool.getFunction(functionName, newArguments);
    }

    // Dispatches on the concrete literal type; plain sentences are the fallback case.
    private static GdlLiteral substituteLiteral(GdlLiteral literal, Substitution theta)
    {
        if (literal instanceof GdlDistinct) {
            return substituteDistinct((GdlDistinct) literal, theta);
        }
        if (literal instanceof GdlNot) {
            return substituteNot((GdlNot) literal, theta);
        }
        if (literal instanceof GdlOr) {
            return substituteOr((GdlOr) literal, theta);
        }
        return substituteSentence((GdlSentence) literal, theta);
    }

    private static GdlNot substituteNot(GdlNot not, Substitution theta)
    {
        if (not.isGround()) {
            return not;
        }
        return GdlPool.getNot(substituteLiteral(not.getBody(), theta));
    }

    private static GdlOr substituteOr(GdlOr or, Substitution theta)
    {
        if (or.isGround()) {
            return or;
        }
        List<GdlLiteral> newDisjuncts = new ArrayList<GdlLiteral>(or.arity());
        for (int disjunctIndex = 0; disjunctIndex < or.arity(); disjunctIndex++) {
            newDisjuncts.add(substituteLiteral(or.get(disjunctIndex), theta));
        }
        return GdlPool.getOr(newDisjuncts);
    }

    // Propositions have no arguments, so there is nothing to substitute.
    private static GdlProposition substituteProposition(GdlProposition proposition, Substitution theta)
    {
        return proposition;
    }

    private static GdlRelation substituteRelation(GdlRelation relation, Substitution theta)
    {
        if (relation.isGround()) {
            return relation;
        }
        GdlConstant relationName = substituteConstant(relation.getName(), theta);
        List<GdlTerm> newArguments = new ArrayList<GdlTerm>(relation.arity());
        for (int argIndex = 0; argIndex < relation.arity(); argIndex++) {
            newArguments.add(substituteTerm(relation.get(argIndex), theta));
        }
        return GdlPool.getRelation(relationName, newArguments);
    }

    private static GdlSentence substituteSentence(GdlSentence sentence, Substitution theta)
    {
        if (sentence instanceof GdlProposition) {
            return substituteProposition((GdlProposition) sentence, theta);
        }
        return substituteRelation((GdlRelation) sentence, theta);
    }

    // Dispatches on the concrete term type; functions are the fallback case.
    private static GdlTerm substituteTerm(GdlTerm term, Substitution theta)
    {
        if (term instanceof GdlConstant) {
            return substituteConstant((GdlConstant) term, theta);
        }
        if (term instanceof GdlVariable) {
            return substituteVariable((GdlVariable) term, theta);
        }
        return substituteFunction((GdlFunction) term, theta);
    }

    private static GdlTerm substituteVariable(GdlVariable variable, Substitution theta)
    {
        if (!theta.contains(variable)) {
            return variable;
        }
        // Chase the binding to a fixed point so chains like x -> y -> c resolve
        // fully, then cache the resolved term back into the substitution.
        GdlTerm current = theta.get(variable);
        GdlTerm next = substituteTerm(current, theta);
        while (!next.equals(current)) {
            current = next;
            next = substituteTerm(current, theta);
        }
        theta.put(variable, current);
        return current;
    }

    private static GdlRule substituteRule(GdlRule rule, Substitution theta)
    {
        GdlSentence newHead = substitute(rule.getHead(), theta);
        List<GdlLiteral> newBody = new ArrayList<GdlLiteral>();
        for (GdlLiteral bodyLiteral : rule.getBody()) {
            newBody.add(substituteLiteral(bodyLiteral, theta));
        }
        return GdlPool.getRule(newHead, newBody);
    }
}
| |
// Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.Gui.Debug.EventLists.Implementations;
import java.awt.Window;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.CUtilityFunctions;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntDeleteException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.NodeTaggingTree.Implementations.CTaggingFunctions;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.Panels.CDebugPerspectiveModel;
import com.google.security.zynamics.binnavi.Gui.MainWindow.Implementations.CNameListGenerators;
import com.google.security.zynamics.binnavi.Gui.Progress.CDefaultProgressOperation;
import com.google.security.zynamics.binnavi.Gui.errordialog.NaviErrorDialog;
import com.google.security.zynamics.binnavi.Tagging.CTag;
import com.google.security.zynamics.binnavi.debug.debugger.interfaces.IDebugger;
import com.google.security.zynamics.binnavi.debug.models.breakpoints.BreakpointAddress;
import com.google.security.zynamics.binnavi.debug.models.breakpoints.enums.BreakpointType;
import com.google.security.zynamics.binnavi.debug.models.trace.TraceList;
import com.google.security.zynamics.binnavi.debug.models.trace.interfaces.ITraceEvent;
import com.google.security.zynamics.binnavi.debug.models.trace.interfaces.ITraceListProvider;
import com.google.security.zynamics.binnavi.disassembly.INaviModule;
import com.google.security.zynamics.binnavi.yfileswrap.zygraph.NaviNode;
import com.google.security.zynamics.binnavi.yfileswrap.zygraph.ZyGraph;
import com.google.security.zynamics.zylib.gui.CMessageBox;
/**
* Contains helper functions for working with traces.
*/
public final class CTraceFunctions {
/**
 * You are not supposed to instantiate this class: it only provides static
 * helper functions for working with traces.
 */
private CTraceFunctions() {
}
/**
 * Deletes a number of traces from a given event list provider.
 *
 * Asks the user once for confirmation for the whole batch, then deletes each
 * trace on its own background thread, each with its own progress operation.
 * Failures are reported per trace through an error dialog.
 *
 * @param parent Parent window used for dialogs.
 * @param listProvider The event list provider that manages the event lists.
 * @param traces Indices of the traces to be deleted.
 */
public static void deleteTrace(
    final Window parent, final ITraceListProvider listProvider, final int[] traces) {
    Preconditions.checkNotNull(parent, "IE01381: Parent argument can not be null");
    Preconditions.checkNotNull(listProvider, "IE01382: List provider argument can't be null");
    Preconditions.checkNotNull(traces, "IE01383: Traces argument can't be null");

    // At first we get the trace list objects that correspond
    // to the indices passed in the traces parameter.
    //
    // We can not delete the event lists directly or we'd have to keep
    // track of changing indices because deleting event lists from
    // the manager changes the internal event list indices.
    final List<TraceList> traceObjects = new ArrayList<TraceList>();

    for (final int trace : traces) {
        traceObjects.add(listProvider.getList(trace));
    }

    if (CMessageBox.showYesNoQuestion(parent, String.format(
        "Do you really want to delete the following traces from the database?\n\n%s",
        CNameListGenerators.getNameList(traceObjects))) == JOptionPane.YES_OPTION) {
        for (final TraceList trace : traceObjects) {
            // One worker thread per trace: the deletion hits the database and
            // must not block the event dispatch thread.
            new Thread() {
                @Override
                public void run() {
                    final CDefaultProgressOperation operation =
                        new CDefaultProgressOperation("", true, false);
                    operation.getProgressPanel().setMaximum(1);
                    operation.getProgressPanel().setText("Deleting trace" + ": " + trace.getName());

                    try {
                        listProvider.removeList(trace);
                        operation.getProgressPanel().next();
                    } catch (final CouldntDeleteException e) {
                        CUtilityFunctions.logException(e);

                        final String innerMessage = "E00075: " + "Could not delete trace list";
                        final String innerDescription = CUtilityFunctions.createDescription(String.format(
                            "The trace list '%s' could not be deleted.", trace.getName()),
                            new String[] {
                                "There was a problem with the database connection."}, new String[] {
                                "The trace list was not deleted. You could try to delete the list again once the connection problem was resolved."});

                        NaviErrorDialog.show(parent, innerMessage, innerDescription, e);
                    } finally {
                        // Always tear down the progress UI, even when deletion failed.
                        operation.stop();
                    }
                }
            }.start();
        }
    }
}
/**
 * Takes the current graph and selects all nodes of the graph where trace events of the given
 * event list occurred.
 *
 * @param graph The graph where the trace list is shown.
 * @param list The event list that is selected in the graph.
 */
public static void selectList(final ZyGraph graph, final TraceList list) {
    Preconditions.checkNotNull(graph, "IE01384: Graph argument can not be null");
    Preconditions.checkNotNull(list, "IE01385: List argument can not be null");

    // CTraceNodeFinder resolves the nodes hit by the trace; presumably the second
    // argument 'true' means "select" rather than "deselect" — confirm against
    // ZyGraph.selectNodes.
    graph.selectNodes(CTraceNodeFinder.getTraceNodes(graph, list), true);
}
/**
 * Sets regular breakpoints on the addresses of the given trace events.
 *
 * @param model Provides the active debugger.
 * @param events Events on which breakpoints are set.
 */
public static void setBreakpoints(
    final CDebugPerspectiveModel model, final List<ITraceEvent> events) {
  final IDebugger debugger = model.getCurrentSelectedDebugger();
  if (debugger == null) {
    // No debugger is selected, so there is nowhere to set breakpoints.
    return;
  }
  final List<INaviModule> debuggedModules = debugger.getModules();
  final Set<BreakpointAddress> collectedAddresses = new HashSet<BreakpointAddress>();
  for (final ITraceEvent traceEvent : events) {
    final BreakpointAddress eventAddress = traceEvent.getOffset();
    // Module-less addresses are always collected; addresses inside a known
    // module are collected only if no regular breakpoint exists there yet.
    final boolean shouldCollect = (eventAddress.getModule() == null)
        || (debuggedModules.contains(eventAddress.getModule())
            && !debugger.getBreakpointManager().hasBreakpoint(
                BreakpointType.REGULAR, eventAddress));
    if (shouldCollect) {
      collectedAddresses.add(eventAddress);
    }
  }
  // Register all collected addresses in a single batch.
  debugger.getBreakpointManager().addBreakpoints(BreakpointType.REGULAR, collectedAddresses);
}
/**
 * Changes the description of a trace list.
 *
 * @param parent Parent window used for dialogs.
 * @param eventList The trace list in question.
 * @param description The new description of the trace list.
 */
public static void setTraceDescription(
    final JFrame parent, final TraceList eventList, final String description) {
  Preconditions.checkNotNull(eventList, "IE01387: Event list argument can't be null");
  Preconditions.checkNotNull(description, "IE01388: Description argument can't be null");
  try {
    eventList.setDescription(description);
  } catch (final CouldntSaveDataException exception) {
    // The new description could not be persisted; log it and tell the user.
    CUtilityFunctions.logException(exception);
    final String errorMessage = "E00076: Could not change trace description";
    final String problemText = String.format(
        "The description of the trace list '%s' could not be changed.", eventList.getName());
    final String errorDescription = CUtilityFunctions.createDescription(problemText,
        new String[] {"There was a problem with the database connection."},
        new String[] {
            "The trace list keeps its old description. You could try changing the description again once the connection problem was resolved."});
    NaviErrorDialog.show(parent, errorMessage, errorDescription, exception);
  }
}
/**
 * Changes the name of a trace list.
 *
 * @param parent Parent window used for dialogs.
 * @param eventList The trace list in question.
 * @param name The new name of the trace list.
 */
public static void setTraceName(
    final JFrame parent, final TraceList eventList, final String name) {
  Preconditions.checkNotNull(eventList, "IE01390: Event list argument can't be null");
  Preconditions.checkNotNull(name, "IE01391: Name argument can't be null");
  try {
    eventList.setName(name);
  } catch (final CouldntSaveDataException exception) {
    // The new name could not be persisted; log it and tell the user.
    CUtilityFunctions.logException(exception);
    final String errorMessage = "E00077: Could not change trace name";
    final String problemText = String.format(
        "The name of the trace list '%s' could not be changed.", eventList.getName());
    final String errorDescription = CUtilityFunctions.createDescription(problemText,
        new String[] {"There was a problem with the database connection."},
        new String[] {
            "The trace list keeps its old name. You could try changing the name again once the connection problem was resolved."});
    NaviErrorDialog.show(parent, errorMessage, errorDescription, exception);
  }
}
/**
 * Tags all nodes hit by an event list with a given tag.
 *
 * @param parent Parent window used for dialogs.
 * @param graph Graph whose nodes are tagged.
 * @param list List that provides the events.
 * @param tag Tag the nodes are tagged with.
 */
public static void tagList(
    final JFrame parent, final ZyGraph graph, final TraceList list, final CTag tag) {
  // Tag each node that was hit by an event of the given list.
  for (final NaviNode hitNode : CTraceNodeFinder.getTraceNodes(graph, list)) {
    CTaggingFunctions.tagNode(parent, hitNode, tag);
  }
}
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.jaxrs.json;
import java.util.LinkedList;
import java.util.List;
import javax.annotation.Nullable;
import org.joda.time.LocalDate;
import org.killbill.billing.catalog.api.BillingPeriod;
import org.killbill.billing.catalog.api.PlanPhase;
import org.killbill.billing.catalog.api.PriceList;
import org.killbill.billing.catalog.api.Product;
import org.killbill.billing.entitlement.api.Subscription;
import org.killbill.billing.entitlement.api.SubscriptionEvent;
import org.killbill.billing.util.audit.AccountAuditLogs;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.wordnik.swagger.annotations.ApiModelProperty;
/**
 * JSON representation of a subscription together with its timeline of events.
 *
 * <p>Instances are immutable value objects used by the JAX-RS layer. The event
 * lists ({@code events}, {@code deletedEvents}, {@code newEvents}) are stored
 * as given (not defensively copied), matching the other JSON value objects in
 * this package, and may be {@code null}.
 */
public class SubscriptionJson extends JsonBase {

    @ApiModelProperty(dataType = "java.util.UUID")
    private final String accountId;
    @ApiModelProperty(dataType = "java.util.UUID")
    private final String bundleId;
    @ApiModelProperty(dataType = "java.util.UUID")
    private final String subscriptionId;
    private final String externalKey;
    private final LocalDate startDate;
    @ApiModelProperty(required = true)
    private final String productName;
    @ApiModelProperty(dataType = "org.killbill.billing.catalog.api.ProductCategory", required = true)
    private final String productCategory;
    @ApiModelProperty(dataType = "org.killbill.billing.catalog.api.BillingPeriod", required = true)
    private final String billingPeriod;
    @ApiModelProperty(required = true)
    private final String priceList;
    private final LocalDate cancelledDate;
    private final LocalDate chargedThroughDate;
    private final LocalDate billingStartDate;
    private final LocalDate billingEndDate;
    private final List<EventSubscriptionJson> events;
    private final List<DeletedEventSubscriptionJson> deletedEvents;
    private final List<NewEventSubscriptionJson> newEvents;

    /**
     * An existing subscription event (has an id and an effective date).
     */
    public static class EventSubscriptionJson extends EventBaseSubscriptionJson {

        private final String eventId;
        private final LocalDate effectiveDate;

        @JsonCreator
        public EventSubscriptionJson(@JsonProperty("eventId") final String eventId,
                                     @JsonProperty("billingPeriod") final String billingPeriod,
                                     // BUGFIX: these were bound to "requestedDt"/"effectiveDt", while
                                     // serialization emits "requestedDate"/"effectiveDate" (from the
                                     // getters) and the sibling creators use the long names too — so a
                                     // serialized event could not be deserialized back. Bind the same
                                     // names the getters produce.
                                     @JsonProperty("requestedDate") final LocalDate requestedDate,
                                     @JsonProperty("effectiveDate") final LocalDate effectiveDate,
                                     @JsonProperty("product") final String product,
                                     @JsonProperty("priceList") final String priceList,
                                     @JsonProperty("eventType") final String eventType,
                                     @JsonProperty("phase") final String phase,
                                     @JsonProperty("auditLogs") @Nullable final List<AuditLogJson> auditLogs) {
            super(billingPeriod, requestedDate, product, priceList, eventType, phase, auditLogs);
            this.eventId = eventId;
            this.effectiveDate = effectiveDate;
        }

        public String getEventId() {
            return eventId;
        }

        public LocalDate getEffectiveDate() {
            return effectiveDate;
        }

        @Override
        public String toString() {
            return "EventSubscriptionJson [eventId=" + eventId
                   + ", effectiveDate=" + effectiveDate
                   + ", getBillingPeriod()=" + getBillingPeriod()
                   + ", getRequestedDate()=" + getRequestedDate()
                   + ", getProduct()=" + getProduct() + ", getPriceList()="
                   + getPriceList() + ", getEventType()=" + getEventType()
                   + ", getPhase()=" + getPhase() + ", getClass()="
                   + getClass() + ", hashCode()=" + hashCode()
                   + ", toString()=" + super.toString() + "]";
        }

        // equals/hashCode intentionally use only the fields declared here
        // (eventId, effectiveDate); base-class fields are compared by the
        // base class's own equals, which this does not call — preserved as-is.
        @Override
        public boolean equals(final Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            final EventSubscriptionJson that = (EventSubscriptionJson) o;
            if (effectiveDate != null ? !effectiveDate.equals(that.effectiveDate) : that.effectiveDate != null) {
                return false;
            }
            if (eventId != null ? !eventId.equals(that.eventId) : that.eventId != null) {
                return false;
            }
            return true;
        }

        @Override
        public int hashCode() {
            int result = eventId != null ? eventId.hashCode() : 0;
            result = 31 * result + (effectiveDate != null ? effectiveDate.hashCode() : 0);
            return result;
        }
    }

    /**
     * A subscription event flagged for deletion; structurally identical to
     * {@link EventSubscriptionJson}, kept as its own type for JSON clarity.
     */
    public static class DeletedEventSubscriptionJson extends EventSubscriptionJson {

        @JsonCreator
        public DeletedEventSubscriptionJson(@JsonProperty("eventId") final String eventId,
                                            @JsonProperty("billingPeriod") final String billingPeriod,
                                            @JsonProperty("requestedDate") final LocalDate requestedDate,
                                            @JsonProperty("effectiveDate") final LocalDate effectiveDate,
                                            @JsonProperty("product") final String product,
                                            @JsonProperty("priceList") final String priceList,
                                            @JsonProperty("eventType") final String eventType,
                                            @JsonProperty("phase") final String phase,
                                            @JsonProperty("auditLogs") @Nullable final List<AuditLogJson> auditLogs) {
            super(eventId, billingPeriod, requestedDate, effectiveDate, product, priceList, eventType, phase, auditLogs);
        }
    }

    /**
     * A subscription event to be created; it has no id or effective date yet.
     */
    public static class NewEventSubscriptionJson extends EventBaseSubscriptionJson {

        @JsonCreator
        public NewEventSubscriptionJson(@JsonProperty("billingPeriod") final String billingPeriod,
                                        @JsonProperty("requestedDate") final LocalDate requestedDate,
                                        @JsonProperty("product") final String product,
                                        @JsonProperty("priceList") final String priceList,
                                        @JsonProperty("eventType") final String eventType,
                                        @JsonProperty("phase") final String phase,
                                        @JsonProperty("auditLogs") @Nullable final List<AuditLogJson> auditLogs) {
            super(billingPeriod, requestedDate, product, priceList, eventType, phase, auditLogs);
        }

        @Override
        public String toString() {
            return "NewEventSubscriptionJson [getBillingPeriod()="
                   + getBillingPeriod() + ", getRequestedDate()="
                   + getRequestedDate() + ", getProduct()=" + getProduct()
                   + ", getPriceList()=" + getPriceList()
                   + ", getEventType()=" + getEventType() + ", getPhase()="
                   + getPhase() + ", getClass()=" + getClass()
                   + ", hashCode()=" + hashCode() + ", toString()="
                   + super.toString() + "]";
        }
    }

    /**
     * Common fields shared by all subscription event representations.
     */
    public abstract static class EventBaseSubscriptionJson extends JsonBase {

        private final String billingPeriod;
        private final LocalDate requestedDate;
        private final String product;
        private final String priceList;
        private final String eventType;
        private final String phase;

        @JsonCreator
        public EventBaseSubscriptionJson(@JsonProperty("billingPeriod") final String billingPeriod,
                                         @JsonProperty("requestedDate") final LocalDate requestedDate,
                                         @JsonProperty("product") final String product,
                                         @JsonProperty("priceList") final String priceList,
                                         @JsonProperty("eventType") final String eventType,
                                         @JsonProperty("phase") final String phase,
                                         @JsonProperty("auditLogs") @Nullable final List<AuditLogJson> auditLogs) {
            super(auditLogs);
            this.billingPeriod = billingPeriod;
            this.requestedDate = requestedDate;
            this.product = product;
            this.priceList = priceList;
            this.eventType = eventType;
            this.phase = phase;
        }

        public String getBillingPeriod() {
            return billingPeriod;
        }

        public LocalDate getRequestedDate() {
            return requestedDate;
        }

        public String getProduct() {
            return product;
        }

        public String getPriceList() {
            return priceList;
        }

        public String getEventType() {
            return eventType;
        }

        public String getPhase() {
            return phase;
        }

        @Override
        public String toString() {
            final StringBuilder sb = new StringBuilder();
            sb.append("EventBaseSubscriptionJson");
            sb.append("{billingPeriod='").append(billingPeriod).append('\'');
            sb.append(", requestedDate=").append(requestedDate);
            sb.append(", product='").append(product).append('\'');
            sb.append(", priceList='").append(priceList).append('\'');
            sb.append(", eventType='").append(eventType).append('\'');
            sb.append(", phase='").append(phase).append('\'');
            sb.append('}');
            return sb.toString();
        }

        @Override
        public boolean equals(final Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            final EventBaseSubscriptionJson that = (EventBaseSubscriptionJson) o;
            if (billingPeriod != null ? !billingPeriod.equals(that.billingPeriod) : that.billingPeriod != null) {
                return false;
            }
            if (eventType != null ? !eventType.equals(that.eventType) : that.eventType != null) {
                return false;
            }
            if (phase != null ? !phase.equals(that.phase) : that.phase != null) {
                return false;
            }
            if (priceList != null ? !priceList.equals(that.priceList) : that.priceList != null) {
                return false;
            }
            if (product != null ? !product.equals(that.product) : that.product != null) {
                return false;
            }
            if (requestedDate != null ? !requestedDate.equals(that.requestedDate) : that.requestedDate != null) {
                return false;
            }
            return true;
        }

        @Override
        public int hashCode() {
            int result = billingPeriod != null ? billingPeriod.hashCode() : 0;
            result = 31 * result + (requestedDate != null ? requestedDate.hashCode() : 0);
            result = 31 * result + (product != null ? product.hashCode() : 0);
            result = 31 * result + (priceList != null ? priceList.hashCode() : 0);
            result = 31 * result + (eventType != null ? eventType.hashCode() : 0);
            result = 31 * result + (phase != null ? phase.hashCode() : 0);
            return result;
        }
    }

    /**
     * Deserialization constructor; all fields are optional.
     */
    @JsonCreator
    public SubscriptionJson(@JsonProperty("accountId") @Nullable final String accountId,
                            @JsonProperty("bundleId") @Nullable final String bundleId,
                            @JsonProperty("subscriptionId") @Nullable final String subscriptionId,
                            @JsonProperty("externalKey") @Nullable final String externalKey,
                            @JsonProperty("startDate") @Nullable final LocalDate startDate,
                            @JsonProperty("productName") @Nullable final String productName,
                            @JsonProperty("productCategory") @Nullable final String productCategory,
                            @JsonProperty("billingPeriod") @Nullable final String billingPeriod,
                            @JsonProperty("priceList") @Nullable final String priceList,
                            @JsonProperty("cancelledDate") @Nullable final LocalDate cancelledDate,
                            @JsonProperty("chargedThroughDate") @Nullable final LocalDate chargedThroughDate,
                            @JsonProperty("billingStartDate") @Nullable final LocalDate billingStartDate,
                            @JsonProperty("billingEndDate") @Nullable final LocalDate billingEndDate,
                            @JsonProperty("events") @Nullable final List<EventSubscriptionJson> events,
                            @JsonProperty("newEvents") @Nullable final List<NewEventSubscriptionJson> newEvents,
                            @JsonProperty("deletedEvents") @Nullable final List<DeletedEventSubscriptionJson> deletedEvents,
                            @JsonProperty("auditLogs") @Nullable final List<AuditLogJson> auditLogs) {
        super(auditLogs);
        this.startDate = startDate;
        this.productName = productName;
        this.productCategory = productCategory;
        this.billingPeriod = billingPeriod;
        this.priceList = priceList;
        this.cancelledDate = cancelledDate;
        this.chargedThroughDate = chargedThroughDate;
        this.billingStartDate = billingStartDate;
        this.billingEndDate = billingEndDate;
        this.accountId = accountId;
        this.bundleId = bundleId;
        this.subscriptionId = subscriptionId;
        this.externalKey = externalKey;
        this.events = events;
        this.deletedEvents = deletedEvents;
        this.newEvents = newEvents;
    }

    /**
     * Builds the JSON view from an entitlement {@code Subscription} and its
     * (optional) event timeline.
     *
     * @param subscription the subscription to represent
     * @param subscriptionEvents the timeline events; if {@code null}, {@code events} is left {@code null}
     * @param accountAuditLogs audit logs for the account, or {@code null} when audits were not requested
     */
    public SubscriptionJson(final Subscription subscription,
                            final List<SubscriptionEvent> subscriptionEvents,
                            @Nullable final AccountAuditLogs accountAuditLogs) {
        super(toAuditLogJson(accountAuditLogs == null ? null : accountAuditLogs.getAuditLogsForSubscription(subscription.getId())));
        this.startDate = subscription.getEffectiveStartDate();
        this.productName = subscription.getLastActiveProduct().getName();
        this.productCategory = subscription.getLastActiveProductCategory().name();
        this.billingPeriod = subscription.getLastActivePlan().getRecurringBillingPeriod().toString();
        this.priceList = subscription.getLastActivePriceList().getName();
        this.cancelledDate = subscription.getEffectiveEndDate();
        this.chargedThroughDate = subscription.getChargedThroughDate();
        this.billingStartDate = subscription.getBillingStartDate();
        this.billingEndDate = subscription.getBillingEndDate();
        this.accountId = subscription.getAccountId().toString();
        this.bundleId = subscription.getBundleId().toString();
        this.subscriptionId = subscription.getId().toString();
        this.externalKey = subscription.getExternalKey();
        this.events = subscriptionEvents != null ? new LinkedList<EventSubscriptionJson>() : null;
        if (events != null) {
            for (final SubscriptionEvent cur : subscriptionEvents) {
                // Prefer the "next" catalog values for each event; fall back to the
                // "prev" values when the event carries no next state.
                final BillingPeriod billingPeriod = cur.getNextBillingPeriod() != null ? cur.getNextBillingPeriod() : cur.getPrevBillingPeriod();
                final Product product = cur.getNextProduct() != null ? cur.getNextProduct() : cur.getPrevProduct();
                final PriceList priceList = cur.getNextPriceList() != null ? cur.getNextPriceList() : cur.getPrevPriceList();
                final PlanPhase phase = cur.getNextPhase() != null ? cur.getNextPhase() : cur.getPrevPhase();
                this.events.add(new EventSubscriptionJson(cur.getId().toString(),
                                                          billingPeriod != null ? billingPeriod.toString() : null,
                                                          cur.getRequestedDate(),
                                                          cur.getEffectiveDate(),
                                                          product != null ? product.getName() : null,
                                                          priceList != null ? priceList.getName() : null,
                                                          cur.getSubscriptionEventType().toString(),
                                                          phase != null ? phase.getName() : null,
                                                          toAuditLogJson(accountAuditLogs == null ? null : accountAuditLogs.getAuditLogsForSubscriptionEvent(cur.getId()))));
            }
        }
        // newEvents/deletedEvents only make sense on inbound (client) payloads.
        this.newEvents = null;
        this.deletedEvents = null;
    }

    public String getAccountId() {
        return accountId;
    }

    public String getBundleId() {
        return bundleId;
    }

    public String getSubscriptionId() {
        return subscriptionId;
    }

    public String getExternalKey() {
        return externalKey;
    }

    public LocalDate getStartDate() {
        return startDate;
    }

    public String getProductName() {
        return productName;
    }

    public String getProductCategory() {
        return productCategory;
    }

    public String getBillingPeriod() {
        return billingPeriod;
    }

    public String getPriceList() {
        return priceList;
    }

    public LocalDate getCancelledDate() {
        return cancelledDate;
    }

    public LocalDate getChargedThroughDate() {
        return chargedThroughDate;
    }

    public LocalDate getBillingStartDate() {
        return billingStartDate;
    }

    public LocalDate getBillingEndDate() {
        return billingEndDate;
    }

    public List<EventSubscriptionJson> getEvents() {
        return events;
    }

    public List<DeletedEventSubscriptionJson> getDeletedEvents() {
        return deletedEvents;
    }

    public List<NewEventSubscriptionJson> getNewEvents() {
        return newEvents;
    }

    @Override
    public String toString() {
        return "SubscriptionJson{" +
               "accountId='" + accountId + '\'' +
               ", bundleId='" + bundleId + '\'' +
               ", subscriptionId='" + subscriptionId + '\'' +
               ", externalKey='" + externalKey + '\'' +
               ", startDate=" + startDate +
               ", productName='" + productName + '\'' +
               ", productCategory='" + productCategory + '\'' +
               ", billingPeriod='" + billingPeriod + '\'' +
               ", priceList='" + priceList + '\'' +
               ", cancelledDate=" + cancelledDate +
               ", chargedThroughDate=" + chargedThroughDate +
               ", billingStartDate=" + billingStartDate +
               ", billingEndDate=" + billingEndDate +
               ", events=" + events +
               ", deletedEvents=" + deletedEvents +
               ", newEvents=" + newEvents +
               '}';
    }

    // Dates are compared with compareTo (chronology-insensitive), everything
    // else with equals — preserved from the original implementation.
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final SubscriptionJson that = (SubscriptionJson) o;
        if (accountId != null ? !accountId.equals(that.accountId) : that.accountId != null) {
            return false;
        }
        if (billingEndDate != null ? billingEndDate.compareTo(that.billingEndDate) != 0 : that.billingEndDate != null) {
            return false;
        }
        if (billingPeriod != null ? !billingPeriod.equals(that.billingPeriod) : that.billingPeriod != null) {
            return false;
        }
        if (billingStartDate != null ? billingStartDate.compareTo(that.billingStartDate) != 0 : that.billingStartDate != null) {
            return false;
        }
        if (bundleId != null ? !bundleId.equals(that.bundleId) : that.bundleId != null) {
            return false;
        }
        if (cancelledDate != null ? cancelledDate.compareTo(that.cancelledDate) != 0 : that.cancelledDate != null) {
            return false;
        }
        if (chargedThroughDate != null ? chargedThroughDate.compareTo(that.chargedThroughDate) != 0 : that.chargedThroughDate != null) {
            return false;
        }
        if (deletedEvents != null ? !deletedEvents.equals(that.deletedEvents) : that.deletedEvents != null) {
            return false;
        }
        if (events != null ? !events.equals(that.events) : that.events != null) {
            return false;
        }
        if (externalKey != null ? !externalKey.equals(that.externalKey) : that.externalKey != null) {
            return false;
        }
        if (newEvents != null ? !newEvents.equals(that.newEvents) : that.newEvents != null) {
            return false;
        }
        if (priceList != null ? !priceList.equals(that.priceList) : that.priceList != null) {
            return false;
        }
        if (productCategory != null ? !productCategory.equals(that.productCategory) : that.productCategory != null) {
            return false;
        }
        if (productName != null ? !productName.equals(that.productName) : that.productName != null) {
            return false;
        }
        if (startDate != null ? startDate.compareTo(that.startDate) != 0 : that.startDate != null) {
            return false;
        }
        if (subscriptionId != null ? !subscriptionId.equals(that.subscriptionId) : that.subscriptionId != null) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = accountId != null ? accountId.hashCode() : 0;
        result = 31 * result + (bundleId != null ? bundleId.hashCode() : 0);
        result = 31 * result + (subscriptionId != null ? subscriptionId.hashCode() : 0);
        result = 31 * result + (externalKey != null ? externalKey.hashCode() : 0);
        result = 31 * result + (startDate != null ? startDate.hashCode() : 0);
        result = 31 * result + (productName != null ? productName.hashCode() : 0);
        result = 31 * result + (productCategory != null ? productCategory.hashCode() : 0);
        result = 31 * result + (billingPeriod != null ? billingPeriod.hashCode() : 0);
        result = 31 * result + (priceList != null ? priceList.hashCode() : 0);
        result = 31 * result + (cancelledDate != null ? cancelledDate.hashCode() : 0);
        result = 31 * result + (chargedThroughDate != null ? chargedThroughDate.hashCode() : 0);
        result = 31 * result + (billingStartDate != null ? billingStartDate.hashCode() : 0);
        result = 31 * result + (billingEndDate != null ? billingEndDate.hashCode() : 0);
        result = 31 * result + (events != null ? events.hashCode() : 0);
        result = 31 * result + (deletedEvents != null ? deletedEvents.hashCode() : 0);
        result = 31 * result + (newEvents != null ? newEvents.hashCode() : 0);
        return result;
    }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.apimgt.migration.client.internal;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.service.component.ComponentContext;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.wso2.carbon.apimgt.impl.APIManagerConfigurationService;
import org.wso2.carbon.apimgt.impl.utils.APIMgtDBUtil;
import org.wso2.carbon.apimgt.migration.APIMigrationException;
import org.wso2.carbon.apimgt.migration.client.MigrateFrom17to18;
import org.wso2.carbon.apimgt.migration.util.Constants;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.registry.core.service.TenantRegistryLoader;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.utils.CarbonUtils;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
/**
* @scr.component name="org.wso2.carbon.apimgt.migration.client" immediate="true"
* @scr.reference name="realm.service"
* interface="org.wso2.carbon.user.core.service.RealmService" cardinality="1..1"
* policy="dynamic" bind="setRealmService" unbind="unsetRealmService"
* @scr.reference name="registry.service"
* interface="org.wso2.carbon.registry.core.service.RegistryService" cardinality="1..1"
* policy="dynamic" bind="setRegistryService" unbind="unsetRegistryService"
* @scr.reference name="registry.core.dscomponent"
* interface="org.wso2.carbon.registry.core.service.RegistryService" cardinality="1..1"
* policy="dynamic" bind="setRegistryService" unbind="unsetRegistryService"
* @scr.reference name="tenant.registryloader" interface="org.wso2.carbon.registry.core.service.TenantRegistryLoader" cardinality="1..1"
* policy="dynamic" bind="setTenantRegistryLoader" unbind="unsetTenantRegistryLoader"
* @scr.reference name="apim.configuration" interface="org.wso2.carbon.apimgt.impl.APIManagerConfigurationService" cardinality="1..1"
* policy="dynamic" bind="setApiManagerConfig" unbind="unsetApiManagerConfig"
*/
@SuppressWarnings("unused")
public class APIMMigrationServiceComponent {

    // Component-wide logger for migration progress and failure reporting.
    private static final Log log = LogFactory.getLog(APIMMigrationServiceComponent.class);

    /**
     * Method to activate bundle. Validates that the running product is the
     * expected API Manager version (by reading carbon.xml) and, if so, runs
     * the 1.7 -> 1.8 registry resource migration.
     *
     * @param context OSGi component context.
     */
    protected void activate(ComponentContext context) {
        boolean isCorrectProductVersion = false;
        try {
            APIMgtDBUtil.initialize();
        } catch (Exception e) {
            //APIMgtDBUtil.initialize() throws generic exception
            log.error("Error occurred while initializing DB Util ", e);
        }
        // Product and version validation: parse carbon.xml and check the
        // <Name> and <Version> elements against the expected product/version.
        File carbonXmlConfig = new File(CarbonUtils.getServerXml());
        DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
        try {
            DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
            Document doc = docBuilder.parse(carbonXmlConfig);
            doc.getDocumentElement().normalize();
            NodeList nameNodes = doc.getElementsByTagName("Name");
            if (nameNodes.getLength() > 0) {
                // Only the first <Name> element is considered.
                Element name = (Element) nameNodes.item(0);
                if (Constants.APIM_PRODUCT_NAME.equals(name.getTextContent())) {
                    NodeList versionNodes = doc.getElementsByTagName("Version");
                    if (versionNodes.getLength() > 0) {
                        Element version = (Element) versionNodes.item(0);
                        if (Constants.VERSION_1_8.equals(version.getTextContent())) {
                            isCorrectProductVersion = true;
                        }
                    }
                }
            }
        } catch (ParserConfigurationException e) {
            log.error("ParserConfigurationException when processing carbon.xml", e);
        } catch (SAXException e) {
            log.error("SAXException when processing carbon.xml", e);
        } catch (IOException e) {
            log.error("IOException when processing carbon.xml", e);
        }
        // Migration scope is controlled via JVM system properties.
        String tenants = System.getProperty(Constants.ARG_MIGRATE_TENANTS);
        boolean migrateAll = Boolean.parseBoolean(System.getProperty(Constants.ARG_MIGRATE_ALL));
        try {
            if (isCorrectProductVersion) {
                log.info("Migrating WSO2 API Manager " + Constants.PREVIOUS_VERSION + " to WSO2 API Manager " + Constants.VERSION_1_8);
                // Create a thread and wait till the APIManager DBUtils is initialized
                MigrateFrom17to18 migrateFrom17to18 = new MigrateFrom17to18(tenants);
                boolean isArgumentValid = false;
                //Default operation will migrate all three types of resources
                if (migrateAll) {
                    log.info("Migrating WSO2 API Manager " + Constants.PREVIOUS_VERSION + " resources to WSO2 API Manager " + Constants.VERSION_1_8);
                    migrateFrom17to18.registryResourceMigration();
                    isArgumentValid = true;
                }
                if (isArgumentValid) {
                    log.info("API Manager " + Constants.PREVIOUS_VERSION + " to " + Constants.VERSION_1_8 + " migration successfully completed");
                }
            } else {
                log.error("Migration client installed in incompatible product version. This migration client is only compatible with " +
                          Constants.APIM_PRODUCT_NAME + " " + Constants.VERSION_1_8 + ". Please verify the product/version in use.");
            }
        } catch (APIMigrationException e) {
            log.error("API Management exception occurred while migrating", e);
        } catch (UserStoreException e) {
            log.error("User store exception occurred while migrating", e);
        } catch (Exception e) {
            // Broad catch keeps the OSGi bundle activation from failing on
            // unexpected migration errors.
            log.error("Generic exception occurred while migrating", e);
        } catch (Throwable t) {
            log.error("Throwable error", t);
        }
        // NOTE(review): this "successfully activated" message is logged
        // unconditionally, even when migration failed above — confirm whether
        // that is intentional.
        log.info("WSO2 API Manager migration component successfully activated.");
    }

    /**
     * Method to deactivate bundle.
     *
     * @param context OSGi component context.
     */
    protected void deactivate(ComponentContext context) {
        log.info("WSO2 API Manager migration bundle is deactivated");
    }

    /**
     * Method to set registry service. Invoked by the OSGi container per the
     * {@code @scr.reference} declarations on this class.
     *
     * @param registryService service to get tenant data.
     */
    protected void setRegistryService(RegistryService registryService) {
        if (log.isDebugEnabled()) {
            log.debug("Setting RegistryService for WSO2 API Manager migration");
        }
        ServiceHolder.setRegistryService(registryService);
    }

    /**
     * Method to unset registry service.
     *
     * @param registryService service to get registry data.
     */
    protected void unsetRegistryService(RegistryService registryService) {
        if (log.isDebugEnabled()) {
            log.debug("Unset Registry service");
        }
        ServiceHolder.setRegistryService(null);
    }

    /**
     * Method to set realm service.
     *
     * @param realmService service to get tenant data.
     */
    protected void setRealmService(RealmService realmService) {
        log.debug("Setting RealmService for WSO2 API Manager migration");
        ServiceHolder.setRealmService(realmService);
    }

    /**
     * Method to unset realm service.
     *
     * @param realmService service to get tenant data.
     */
    protected void unsetRealmService(RealmService realmService) {
        if (log.isDebugEnabled()) {
            log.debug("Unset Realm service");
        }
        ServiceHolder.setRealmService(null);
    }

    /**
     * Method to set tenant registry loader
     *
     * @param tenantRegLoader tenant registry loader
     */
    protected void setTenantRegistryLoader(TenantRegistryLoader tenantRegLoader) {
        log.debug("Setting TenantRegistryLoader for WSO2 API Manager migration");
        ServiceHolder.setTenantRegLoader(tenantRegLoader);
    }

    /**
     * Method to unset tenant registry loader
     *
     * @param tenantRegLoader tenant registry loader
     */
    protected void unsetTenantRegistryLoader(TenantRegistryLoader tenantRegLoader) {
        log.debug("Unset Tenant Registry Loader");
        ServiceHolder.setTenantRegLoader(null);
    }

    /**
     * Method to set API Manager configuration. The reference is declared so
     * activation waits for the configuration service; the instance itself is
     * not stored.
     *
     * @param apiManagerConfig api manager configuration
     */
    protected void setApiManagerConfig(APIManagerConfigurationService apiManagerConfig) {
        log.info("Setting APIManager configuration");
    }

    /**
     * Method to unset API manager configuration
     *
     * @param apiManagerConfig api manager configuration
     */
    protected void unsetApiManagerConfig(APIManagerConfigurationService apiManagerConfig) {
        log.info("Un-setting APIManager configuration");
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeInsight.daemon.DaemonBundle;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.daemon.impl.analysis.CustomHighlightInfoHolder;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightInfoHolder;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightingLevelManager;
import com.intellij.codeInsight.problems.ProblemImpl;
import com.intellij.concurrency.JobScheduler;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.problems.Problem;
import com.intellij.problems.WolfTheProblemSolver;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiErrorElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.search.PsiTodoSearchHelper;
import com.intellij.psi.search.TodoItem;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.NotNullProducer;
import com.intellij.util.SmartList;
import com.intellij.util.containers.Stack;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Highlighting pass that runs the registered {@link HighlightVisitor}s over the PSI elements of a
 * file (syntax-level analysis plus TODO highlighting) and feeds the produced {@link HighlightInfo}s
 * to a {@link HighlightInfoProcessor}. Elements are processed in two waves: those inside the
 * priority (visible) range first, then the rest. Errors found are optionally reported to
 * {@link WolfTheProblemSolver}.
 */
public class GeneralHighlightingPass extends ProgressableTextEditorHighlightingPass implements DumbAware {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.daemon.impl.GeneralHighlightingPass");
private static final String PRESENTABLE_NAME = DaemonBundle.message("pass.syntax");
// Per-file flag remembered between passes: whether the file contained a PsiErrorElement last time.
private static final Key<Boolean> HAS_ERROR_ELEMENT = Key.create("HAS_ERROR_ELEMENT");
// Accepts only files for which highlighting is enabled.
// NOTE(review): "HIGHIGHT" is a typo for "HIGHLIGHT", but the field is protected and may be
// referenced by subclasses outside this file, so renaming is not safe from here.
protected static final Condition<PsiFile> SHOULD_HIGHIGHT_FILTER = new Condition<PsiFile>() {
@Override
public boolean value(PsiFile file) {
return HighlightingLevelManager.getInstance(file.getProject()).shouldHighlight(file);
}
};
// Used to jitter the daemon-restart delay; see cancelAndRestartDaemonLater().
private static final Random RESTART_DAEMON_RANDOM = new Random();
protected final boolean myUpdateAll;
// The sub-range (typically the visible area) whose highlights should be produced first.
protected final ProperTextRange myPriorityRange;
protected final List<HighlightInfo> myHighlights = new ArrayList<HighlightInfo>();
protected volatile boolean myHasErrorElement;
private volatile boolean myErrorFound;
protected final EditorColorsScheme myGlobalScheme;
// Producer is swappable (see setHighlightVisitorProducer) so tests/subclasses can inject visitors.
private volatile NotNullProducer<HighlightVisitor[]> myHighlightVisitorProducer = new NotNullProducer<HighlightVisitor[]>() {
@NotNull
@Override
public HighlightVisitor[] produce() {
return cloneHighlightVisitors();
}
};
/**
 * @param startOffset/endOffset restrict range of the pass
 * @param updateAll             whether this pass covers the whole file (affects error-flag bookkeeping)
 * @param priorityRange         range to highlight first (usually the visible part of the editor)
 */
public GeneralHighlightingPass(@NotNull Project project,
@NotNull PsiFile file,
@NotNull Document document,
int startOffset,
int endOffset,
boolean updateAll,
@NotNull ProperTextRange priorityRange,
@Nullable Editor editor,
@NotNull HighlightInfoProcessor highlightInfoProcessor) {
super(project, document, PRESENTABLE_NAME, file, editor, TextRange.create(startOffset, endOffset), true, highlightInfoProcessor);
myUpdateAll = updateAll;
myPriorityRange = priorityRange;
PsiUtilCore.ensureValid(file);
boolean wholeFileHighlighting = isWholeFileHighlighting();
// For partial passes, inherit the error state remembered from the previous whole-file pass.
myHasErrorElement = !wholeFileHighlighting && Boolean.TRUE.equals(myFile.getUserData(HAS_ERROR_ELEMENT));
final DaemonCodeAnalyzerEx daemonCodeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(myProject);
FileStatusMap fileStatusMap = daemonCodeAnalyzer.getFileStatusMap();
myErrorFound = !wholeFileHighlighting && fileStatusMap.wasErrorFound(myDocument);
// initial guess to show correct progress in the traffic light icon
setProgressLimit(document.getTextLength()/2); // approx number of PSI elements = file length/2
myGlobalScheme = editor != null ? editor.getColorsScheme() : EditorColorsManager.getInstance().getGlobalScheme();
}
// Project-level counter of live visitor arrays; used to decide when cloning is required.
private static final Key<AtomicInteger> HIGHLIGHT_VISITOR_INSTANCE_COUNT = new Key<AtomicInteger>("HIGHLIGHT_VISITOR_INSTANCE_COUNT");
/**
 * Returns the registered highlight visitors, cloning them when another pass is already
 * using the shared extension instances (visitors are stateful and not shareable).
 */
@NotNull
private HighlightVisitor[] cloneHighlightVisitors() {
int oldCount = incVisitorUsageCount(1);
HighlightVisitor[] highlightVisitors = Extensions.getExtensions(HighlightVisitor.EP_HIGHLIGHT_VISITOR, myProject);
if (oldCount != 0) {
HighlightVisitor[] clones = new HighlightVisitor[highlightVisitors.length];
for (int i = 0; i < highlightVisitors.length; i++) {
HighlightVisitor highlightVisitor = highlightVisitors[i];
HighlightVisitor cloned = highlightVisitor.clone();
assert cloned.getClass() == highlightVisitor.getClass() : highlightVisitor.getClass()+".clone() must return a copy of "+highlightVisitor.getClass()+"; but got: "+cloned+" of "+cloned.getClass();
clones[i] = cloned;
}
highlightVisitors = clones;
}
return highlightVisitors;
}
/**
 * Keeps only the visitors that are dumb-aware (in dumb mode) and suitable for the given file.
 * Logs an error if nothing remains — the pass would silently produce no highlights otherwise.
 */
@NotNull
private HighlightVisitor[] filterVisitors(@NotNull HighlightVisitor[] highlightVisitors, @NotNull PsiFile psiFile) {
final List<HighlightVisitor> visitors = new ArrayList<HighlightVisitor>(highlightVisitors.length);
List<HighlightVisitor> list = Arrays.asList(highlightVisitors);
for (HighlightVisitor visitor : DumbService.getInstance(myProject).filterByDumbAwareness(list)) {
if (visitor.suitableForFile(psiFile)) {
visitors.add(visitor);
}
}
if (visitors.isEmpty()) {
LOG.error("No visitors registered. list=" +
list +
"; all visitors are:" +
Arrays.asList(Extensions.getExtensions(HighlightVisitor.EP_HIGHLIGHT_VISITOR, myProject)));
}
return visitors.toArray(new HighlightVisitor[visitors.size()]);
}
/** Replaces the visitor source, e.g. for tests. */
public void setHighlightVisitorProducer(@NotNull NotNullProducer<HighlightVisitor[]> highlightVisitorProducer) {
myHighlightVisitorProducer = highlightVisitorProducer;
}
@NotNull
protected HighlightVisitor[] getHighlightVisitors(@NotNull PsiFile psiFile) {
return filterVisitors(myHighlightVisitorProducer.produce(), psiFile);
}
// returns old value
public int incVisitorUsageCount(int delta) {
AtomicInteger count = myProject.getUserData(HIGHLIGHT_VISITOR_INSTANCE_COUNT);
if (count == null) {
// putUserDataIfAbsent guarantees a single shared counter even under concurrent init.
count = ((UserDataHolderEx)myProject).putUserDataIfAbsent(HIGHLIGHT_VISITOR_INSTANCE_COUNT, new AtomicInteger(0));
}
int old = count.getAndAdd(delta);
assert old + delta >= 0 : old +";" + delta;
return old;
}
/**
 * Main entry point: divides the restricted range into inside/outside-priority elements,
 * highlights TODOs, runs the visitors, and hands results to the highlight-info processor.
 * On visitor failure the daemon is cancelled and restarted later.
 */
@Override
protected void collectInformationWithProgress(@NotNull final ProgressIndicator progress) {
final List<HighlightInfo> outsideResult = new ArrayList<HighlightInfo>(100);
final List<HighlightInfo> insideResult = new ArrayList<HighlightInfo>(100);
final DaemonCodeAnalyzerEx daemonCodeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(myProject);
final HighlightVisitor[] filteredVisitors = getHighlightVisitors(myFile);
final List<PsiElement> insideElements = new ArrayList<PsiElement>();
final List<PsiElement> outsideElements = new ArrayList<PsiElement>();
try {
List<ProperTextRange> insideRanges = new ArrayList<ProperTextRange>();
List<ProperTextRange> outsideRanges = new ArrayList<ProperTextRange>();
Divider.divideInsideAndOutside(myFile, myRestrictRange.getStartOffset(), myRestrictRange.getEndOffset(), myPriorityRange, insideElements, insideRanges, outsideElements,
outsideRanges, false, SHOULD_HIGHIGHT_FILTER);
// put file element always in outsideElements
if (!insideElements.isEmpty() && insideElements.get(insideElements.size()-1) instanceof PsiFile) {
PsiElement file = insideElements.remove(insideElements.size() - 1);
outsideElements.add(file);
ProperTextRange range = insideRanges.remove(insideRanges.size() - 1);
outsideRanges.add(range);
}
// Now we know the real element count — replace the initial length-based guess.
setProgressLimit((long)(insideElements.size()+outsideElements.size()));
final boolean forceHighlightParents = forceHighlightParents();
if (!isDumbMode()) {
highlightTodos(myFile, myDocument.getCharsSequence(), myRestrictRange.getStartOffset(), myRestrictRange.getEndOffset(), progress, myPriorityRange, insideResult,
outsideResult);
}
boolean success = collectHighlights(insideElements, insideRanges, outsideElements, outsideRanges, progress, filteredVisitors, insideResult, outsideResult, forceHighlightParents);
if (success) {
myHighlightInfoProcessor.highlightsOutsideVisiblePartAreProduced(myHighlightingSession, outsideResult, myPriorityRange,
myRestrictRange,
getId());
if (myUpdateAll) {
daemonCodeAnalyzer.getFileStatusMap().setErrorFoundFlag(myProject, myDocument, myErrorFound);
}
}
else {
cancelAndRestartDaemonLater(progress, myProject);
}
}
finally {
// Balance the +1 taken in cloneHighlightVisitors(); results are kept even on failure.
incVisitorUsageCount(-1);
myHighlights.addAll(insideResult);
myHighlights.addAll(outsideResult);
}
}
protected boolean isFailFastOnAcquireReadAction() {
return true;
}
private boolean isWholeFileHighlighting() {
return myUpdateAll && myRestrictRange.equalsToRange(0, myDocument.getTextLength());
}
@Override
protected void applyInformationWithProgress() {
// Remember error-element presence for subsequent partial passes (see constructor).
myFile.putUserData(HAS_ERROR_ELEMENT, myHasErrorElement);
if (myUpdateAll) {
reportErrorsToWolf();
}
}
@Override
@NotNull
public List<HighlightInfo> getInfos() {
// Defensive copy — callers must not mutate internal state.
return new ArrayList<HighlightInfo>(myHighlights);
}
/**
 * Runs all visitors over both element lists (priority range first), collecting highlights
 * into insideResult/outsideResult. Returns false if any visitor's analyze() failed.
 */
private boolean collectHighlights(@NotNull final List<PsiElement> elements1,
@NotNull final List<ProperTextRange> ranges1,
@NotNull final List<PsiElement> elements2,
@NotNull final List<ProperTextRange> ranges2,
@NotNull final ProgressIndicator progress,
@NotNull final HighlightVisitor[] visitors,
@NotNull final List<HighlightInfo> insideResult,
@NotNull final List<HighlightInfo> outsideResult,
final boolean forceHighlightParents) {
// Parents of elements that already produced an error; visiting them can be skipped.
final Set<PsiElement> skipParentsSet = new THashSet<PsiElement>();
// TODO - add color scheme to holder
final HighlightInfoHolder holder = createInfoHolder(myFile);
final int chunkSize = Math.max(1, (elements1.size()+elements2.size()) / 100); // one percent precision is enough
boolean success = analyzeByVisitors(visitors, holder, 0, new Runnable() {
@Override
public void run() {
runVisitors(elements1, ranges1, chunkSize, progress, skipParentsSet, holder, insideResult, outsideResult, forceHighlightParents, visitors);
final TextRange priorityIntersection = myPriorityRange.intersection(myRestrictRange);
if ((!elements1.isEmpty() || !insideResult.isEmpty()) && priorityIntersection != null) { // do not apply when there were no elements to highlight
myHighlightInfoProcessor.highlightsInsideVisiblePartAreProduced(myHighlightingSession, insideResult, myPriorityRange, myRestrictRange, getId());
}
runVisitors(elements2, ranges2, chunkSize, progress, skipParentsSet, holder, insideResult, outsideResult, forceHighlightParents, visitors);
}
});
List<HighlightInfo> postInfos = new ArrayList<HighlightInfo>(holder.size());
// there can be extra highlights generated in PostHighlightVisitor
for (int j = 0; j < holder.size(); j++) {
final HighlightInfo info = holder.get(j);
assert info != null;
postInfos.add(info);
}
myHighlightInfoProcessor.highlightsInsideVisiblePartAreProduced(myHighlightingSession, postInfos, myFile.getTextRange(), myFile.getTextRange(), POST_UPDATE_ALL);
return success;
}
/**
 * Recursively nests each visitor's analyze() call around the next, so the innermost
 * Runnable (the actual element walk) runs inside every visitor's analysis session.
 */
private boolean analyzeByVisitors(@NotNull final HighlightVisitor[] visitors,
@NotNull final HighlightInfoHolder holder,
final int i,
@NotNull final Runnable action) {
final boolean[] success = {true};
if (i == visitors.length) {
action.run();
}
else {
if (!visitors[i].analyze(myFile, myUpdateAll, holder, new Runnable() {
@Override
public void run() {
success[0] = analyzeByVisitors(visitors, holder, i + 1, action);
}
})) {
success[0] = false;
}
}
return success[0];
}
/**
 * Visits each element with all visitors, sorts the produced infos into inside/outside
 * lists, and notifies the processor when all highlights for a range are complete.
 * A stack of nested ranges merges infos produced while visiting children with the
 * same text range as their parent.
 */
private void runVisitors(@NotNull List<PsiElement> elements,
@NotNull List<ProperTextRange> ranges,
int chunkSize,
@NotNull ProgressIndicator progress,
@NotNull Set<PsiElement> skipParentsSet,
@NotNull HighlightInfoHolder holder,
@NotNull List<HighlightInfo> insideResult,
@NotNull List<HighlightInfo> outsideResult,
boolean forceHighlightParents,
@NotNull HighlightVisitor[] visitors) {
Stack<TextRange> nestedRange = new Stack<TextRange>();
Stack<List<HighlightInfo>> nestedInfos = new Stack<List<HighlightInfo>>();
boolean failed = false;
int nextLimit = chunkSize;
for (int i = 0; i < elements.size(); i++) {
PsiElement element = elements.get(i);
progress.checkCanceled();
PsiElement parent = element.getParent();
// Propagate the skip mark upwards: if this (composite) element is marked, mark its parent too.
if (element != myFile && !skipParentsSet.isEmpty() && element.getFirstChild() != null && skipParentsSet.contains(element)) {
skipParentsSet.add(parent);
continue;
}
boolean isErrorElement = element instanceof PsiErrorElement;
if (isErrorElement) {
myHasErrorElement = true;
}
for (HighlightVisitor visitor : visitors) {
try {
visitor.visit(element);
}
catch (ProcessCanceledException e) {
throw e;
}
catch (IndexNotReadyException e) {
throw e;
}
catch (Exception e) {
// Log only the first failure to avoid flooding the log; keep visiting other elements.
if (!failed) {
LOG.error(e);
}
failed = true;
}
}
if (i == nextLimit) {
advanceProgress(chunkSize);
nextLimit = i + chunkSize;
}
TextRange elementRange = ranges.get(i);
List<HighlightInfo> infosForThisRange = holder.size() == 0 ? null : new ArrayList<HighlightInfo>(holder.size());
for (int j = 0; j < holder.size(); j++) {
final HighlightInfo info = holder.get(j);
if (!myRestrictRange.containsRange(info.getStartOffset(), info.getEndOffset())) continue;
List<HighlightInfo> result = myPriorityRange.containsRange(info.getStartOffset(), info.getEndOffset()) && !(element instanceof PsiFile) ? insideResult : outsideResult;
// have to filter out already obtained highlights
if (!result.add(info)) continue;
boolean isError = info.getSeverity() == HighlightSeverity.ERROR;
if (isError) {
if (!forceHighlightParents) {
skipParentsSet.add(parent);
}
myErrorFound = true;
}
// if this highlight info range is exactly the same as the element range we are visiting
// that means we can clear this highlight as soon as visitors won't produce any highlights during visiting the same range next time.
// We also know that we can remove syntax error element.
info.setBijective(elementRange.equalsToRange(info.startOffset, info.endOffset) || isErrorElement);
myHighlightInfoProcessor.infoIsAvailable(myHighlightingSession, info);
infosForThisRange.add(info);
}
holder.clear();
// include infos which we got while visiting nested elements with the same range
while (true) {
if (!nestedRange.isEmpty() && elementRange.contains(nestedRange.peek())) {
TextRange oldRange = nestedRange.pop();
List<HighlightInfo> oldInfos = nestedInfos.pop();
if (elementRange.equals(oldRange)) {
if (infosForThisRange == null) {
infosForThisRange = oldInfos;
}
else if (oldInfos != null) {
infosForThisRange.addAll(oldInfos);
}
}
}
else {
break;
}
}
nestedRange.push(elementRange);
nestedInfos.push(infosForThisRange);
// optimisation: this element range does not equal to its parent' range if next element in "ranges" range is different since we top-sorted elements there by ancestry
if (parent == null || i != ranges.size()-1 && !elementRange.equals(ranges.get(i+1)) || !Comparing.equal(elementRange, parent.getTextRange())) {
myHighlightInfoProcessor.allHighlightsForRangeAreProduced(myHighlightingSession, elementRange, infosForThisRange);
}
}
// Flush the remainder of the progress not covered by whole chunks.
advanceProgress(elements.size() - (nextLimit-chunkSize));
}
// Pass id used for post-visitor infos produced after the normal element walk.
private static final int POST_UPDATE_ALL = 5;
/**
 * Cancels the current progress and schedules a daemon restart after a random short delay
 * (the jitter avoids repeated lock-step restart storms), then throws PCE to unwind.
 */
private static void cancelAndRestartDaemonLater(@NotNull ProgressIndicator progress,
@NotNull final Project project) throws ProcessCanceledException {
progress.cancel();
JobScheduler.getScheduler().schedule(new Runnable() {
@Override
public void run() {
Application application = ApplicationManager.getApplication();
if (!project.isDisposed() && !application.isDisposed() && !application.isUnitTestMode()) {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
DaemonCodeAnalyzer.getInstance(project).restart();
}
}, project.getDisposed());
}
}
}, RESTART_DAEMON_RANDOM.nextInt(100), TimeUnit.MILLISECONDS);
throw new ProcessCanceledException();
}
/** True if any registered HighlightRangeExtension wants parents of this file highlighted. */
private boolean forceHighlightParents() {
boolean forceHighlightParents = false;
for(HighlightRangeExtension extension: Extensions.getExtensions(HighlightRangeExtension.EP_NAME)) {
if (extension.isForceHighlightParents(myFile)) {
forceHighlightParents = true;
break;
}
}
return forceHighlightParents;
}
protected HighlightInfoHolder createInfoHolder(@NotNull PsiFile file) {
final HighlightInfoFilter[] filters = HighlightInfoFilter.EXTENSION_POINT_NAME.getExtensions();
return new CustomHighlightInfoHolder(file, getColorsScheme(), filters);
}
/**
 * Creates TODO highlights for the given offset range, routing each into insideResult or
 * outsideResult depending on whether it falls within the priority range.
 */
static void highlightTodos(@NotNull PsiFile file,
@NotNull CharSequence text,
int startOffset,
int endOffset,
@NotNull ProgressIndicator progress,
@NotNull ProperTextRange priorityRange,
@NotNull Collection<HighlightInfo> insideResult,
@NotNull Collection<HighlightInfo> outsideResult) {
PsiTodoSearchHelper helper = PsiTodoSearchHelper.SERVICE.getInstance(file.getProject());
if (helper == null) return;
TodoItem[] todoItems = helper.findTodoItems(file, startOffset, endOffset);
if (todoItems.length == 0) return;
for (TodoItem todoItem : todoItems) {
progress.checkCanceled();
TextRange range = todoItem.getTextRange();
String description = text.subSequence(range.getStartOffset(), range.getEndOffset()).toString();
TextAttributes attributes = todoItem.getPattern().getAttributes().getTextAttributes();
HighlightInfo.Builder builder = HighlightInfo.newHighlightInfo(HighlightInfoType.TODO).range(range);
builder.textAttributes(attributes);
builder.descriptionAndTooltip(description);
HighlightInfo info = builder.createUnconditionally();
(priorityRange.containsRange(info.getStartOffset(), info.getEndOffset()) ? insideResult : outsideResult).add(info);
}
}
/**
 * Converts collected ERROR-severity infos into Problems and reports them to
 * WolfTheProblemSolver for project-wide error tracking.
 */
private void reportErrorsToWolf() {
if (!myFile.getViewProvider().isPhysical()) return; // e.g. errors in evaluate expression
Project project = myFile.getProject();
if (!PsiManager.getInstance(project).isInProject(myFile)) return; // do not report problems in libraries
VirtualFile file = myFile.getVirtualFile();
if (file == null) return;
List<Problem> problems = convertToProblems(getInfos(), file, myHasErrorElement);
WolfTheProblemSolver wolf = WolfTheProblemSolver.getInstance(project);
boolean hasErrors = DaemonCodeAnalyzerEx.hasErrors(project, getDocument());
// A whole-file pass is authoritative: replace the problem list; otherwise only append.
if (!hasErrors || isWholeFileHighlighting()) {
wolf.reportProblems(file, problems);
}
else {
wolf.weHaveGotProblems(file, problems);
}
}
@Override
public double getProgress() {
// do not show progress of visible highlighters update
return myUpdateAll ? super.getProgress() : -1;
}
private static List<Problem> convertToProblems(@NotNull Collection<HighlightInfo> infos,
@NotNull VirtualFile file,
final boolean hasErrorElement) {
List<Problem> problems = new SmartList<Problem>();
for (HighlightInfo info : infos) {
if (info.getSeverity() == HighlightSeverity.ERROR) {
Problem problem = new ProblemImpl(file, info, hasErrorElement);
problems.add(problem);
}
}
return problems;
}
@Override
public String toString() {
return super.toString() + " updateAll="+myUpdateAll+" range= "+myRestrictRange;
}
}
| |
package com.vumobile.service.utils;
/**
* Created by toukirul on 20/3/2017.
*/
import android.app.ActivityManager;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.Ringtone;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.Build;
import android.support.v4.app.NotificationCompat;
import android.text.Html;
import android.text.TextUtils;
import android.util.Patterns;
import com.vumobile.Config.Api;
import com.vumobile.celeb.R;
import com.vumobile.celeb.ui.CelebrityProfileActivity;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
/**
* Created by Ravi on 31/03/15.
*/
/**
 * Helper for building and posting push-notification UI: small (inbox-style) and big-picture
 * notifications, notification sound playback, background-state detection and tray cleanup.
 * <p>
 * Fixes over the previous revision:
 * <ul>
 *   <li>a non-empty but invalid {@code imageUrl} no longer silently drops the notification —
 *       it now falls back to the small notification;</li>
 *   <li>{@link #getBitmapFromURL(String)} closes its stream and disconnects the connection.</li>
 * </ul>
 */
@SuppressWarnings("ALL")
public class NotificationUtils {

    private static final String TAG = NotificationUtils.class.getSimpleName();

    private Context mContext;

    public NotificationUtils(Context mContext) {
        this.mContext = mContext;
    }

    /** Convenience overload without an image: shows the small notification. */
    public void showNotificationMessage(String title, String message, String timeStamp, Intent intent) {
        showNotificationMessage(title, message, timeStamp, intent, null);
    }

    /**
     * Builds and posts a notification. If {@code imageUrl} is a valid web URL and the image
     * downloads successfully a big-picture notification is shown; otherwise a small one.
     *
     * @param intent ignored and rebuilt internally.
     *               NOTE(review): the caller-supplied intent is discarded here — confirm that
     *               every notification is really meant to open CelebrityProfileActivity.
     */
    public void showNotificationMessage(final String title, final String message, final String timeStamp, Intent intent, String imageUrl) {
        // Check for empty push message
        if (TextUtils.isEmpty(message))
            return;

        // notification icon
        final int icon = R.drawable.ic_launcher;

        // fired after user click notification
        // here we put messages into intent for passing data to another activity for showing
        intent = new Intent(mContext,CelebrityProfileActivity.class);
        intent.putExtra("message",message);
        intent.putExtra("imageUrl",imageUrl);
        intent.putExtra("time_stamp",timeStamp);
        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);

        final PendingIntent resultPendingIntent =
                PendingIntent.getActivity(
                        mContext,
                        0,
                        intent,
                        PendingIntent.FLAG_CANCEL_CURRENT
                );

        final NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(
                mContext);

        final Uri alarmSound = Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE
                + "://" + mContext.getPackageName() + "/raw/notification");

        // Try the big-picture style only for a plausible web URL; any failure falls back to
        // the small notification (previously an invalid non-empty URL showed nothing at all).
        if (!TextUtils.isEmpty(imageUrl)
                && imageUrl.length() > 4
                && Patterns.WEB_URL.matcher(imageUrl).matches()) {
            Bitmap bitmap = getBitmapFromURL(imageUrl);
            if (bitmap != null) {
                showBigNotification(bitmap, mBuilder, icon, title, message, timeStamp, resultPendingIntent, alarmSound);
            } else {
                showSmallNotification(mBuilder, icon, title, message, timeStamp, resultPendingIntent, alarmSound);
            }
        } else {
            showSmallNotification(mBuilder, icon, title, message, timeStamp, resultPendingIntent, alarmSound);
            playNotificationSound();
        }
    }

    /** Posts an inbox-style notification under {@code Api.NOTIFICATION_ID}. */
    private void showSmallNotification(NotificationCompat.Builder mBuilder, int icon, String title, String message, String timeStamp, PendingIntent resultPendingIntent, Uri alarmSound) {
        NotificationCompat.InboxStyle inboxStyle = new NotificationCompat.InboxStyle();
        inboxStyle.addLine(message);

        Notification notification;
        notification = mBuilder.setSmallIcon(icon).setTicker(title).setWhen(0)
                .setAutoCancel(true)
                .setContentTitle(title)
                .setContentIntent(resultPendingIntent)
                .setSound(alarmSound)
                .setStyle(inboxStyle)
                .setWhen(getTimeMilliSec(timeStamp))
                // NOTE(review): overrides the setSmallIcon(icon) above — last call wins; kept as-is.
                .setSmallIcon(R.mipmap.ic_launcher)
                .setLargeIcon(BitmapFactory.decodeResource(mContext.getResources(), icon))
                .setContentText(message)
                .build();

        NotificationManager notificationManager = (NotificationManager) mContext.getSystemService(Context.NOTIFICATION_SERVICE);
        notificationManager.notify(Api.NOTIFICATION_ID, notification);
    }

    /** Posts a big-picture notification under {@code Api.NOTIFICATION_ID_BIG_IMAGE}. */
    private void showBigNotification(Bitmap bitmap, NotificationCompat.Builder mBuilder, int icon, String title, String message, String timeStamp, PendingIntent resultPendingIntent, Uri alarmSound) {
        NotificationCompat.BigPictureStyle bigPictureStyle = new NotificationCompat.BigPictureStyle();
        bigPictureStyle.setBigContentTitle(title);
        bigPictureStyle.setSummaryText(Html.fromHtml(message).toString());
        bigPictureStyle.bigPicture(bitmap);

        Notification notification;
        notification = mBuilder.setSmallIcon(icon).setTicker(title).setWhen(0)
                .setAutoCancel(true)
                .setContentTitle(title)
                .setContentIntent(resultPendingIntent)
                .setSound(alarmSound)
                .setStyle(bigPictureStyle)
                .setWhen(getTimeMilliSec(timeStamp))
                .setSmallIcon(R.drawable.ic_launcher)
                .setLargeIcon(BitmapFactory.decodeResource(mContext.getResources(), icon))
                .setContentText(message)
                .build();

        NotificationManager notificationManager = (NotificationManager) mContext.getSystemService(Context.NOTIFICATION_SERVICE);
        notificationManager.notify(Api.NOTIFICATION_ID_BIG_IMAGE, notification);
    }

    /**
     * Downloading push notification image before displaying it in
     * the notification tray.
     *
     * @return the decoded bitmap, or {@code null} on any I/O failure.
     *         NOTE(review): performs blocking network I/O — must not be called on the main thread.
     */
    public Bitmap getBitmapFromURL(String strURL) {
        HttpURLConnection connection = null;
        InputStream input = null;
        try {
            URL url = new URL(strURL);
            connection = (HttpURLConnection) url.openConnection();
            connection.setDoInput(true);
            connection.connect();
            input = connection.getInputStream();
            return BitmapFactory.decodeStream(input);
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        } finally {
            // Previously both the stream and the connection leaked.
            if (input != null) {
                try {
                    input.close();
                } catch (IOException ignored) {
                    // best-effort close; nothing useful to do here
                }
            }
            if (connection != null) {
                connection.disconnect();
            }
        }
    }

    // Playing notification sound
    public void playNotificationSound() {
        try {
            Uri alarmSound = Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE
                    + "://" + mContext.getPackageName() + "/raw/notification");
            Ringtone r = RingtoneManager.getRingtone(mContext, alarmSound);
            r.play();
        } catch (Exception e) {
            // best-effort: a missing/unplayable sound must not break the notification
            e.printStackTrace();
        }
    }

    /**
     * Method checks if the app is in background or not.
     */
    public static boolean isAppIsInBackground(Context context) {
        boolean isInBackground = true;
        ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        if (Build.VERSION.SDK_INT > Build.VERSION_CODES.KITKAT_WATCH) {
            // On Lollipop+ getRunningTasks is restricted; inspect process importance instead.
            List<ActivityManager.RunningAppProcessInfo> runningProcesses = am.getRunningAppProcesses();
            for (ActivityManager.RunningAppProcessInfo processInfo : runningProcesses) {
                if (processInfo.importance == ActivityManager.RunningAppProcessInfo.IMPORTANCE_FOREGROUND) {
                    for (String activeProcess : processInfo.pkgList) {
                        if (activeProcess.equals(context.getPackageName())) {
                            isInBackground = false;
                        }
                    }
                }
            }
        } else {
            List<ActivityManager.RunningTaskInfo> taskInfo = am.getRunningTasks(1);
            ComponentName componentInfo = taskInfo.get(0).topActivity;
            if (componentInfo.getPackageName().equals(context.getPackageName())) {
                isInBackground = false;
            }
        }
        return isInBackground;
    }

    // Clears notification tray messages
    public static void clearNotifications(Context context) {
        NotificationManager notificationManager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
        notificationManager.cancelAll();
    }

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" timestamp into epoch milliseconds.
     *
     * @return epoch millis, or 0 if the timestamp cannot be parsed.
     */
    public static long getTimeMilliSec(String timeStamp) {
        // SimpleDateFormat is not thread-safe; a fresh instance per call keeps this safe.
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        try {
            Date date = format.parse(timeStamp);
            return date.getTime();
        } catch (ParseException e) {
            e.printStackTrace();
        }
        return 0;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty4;
import java.io.File;
import java.net.URI;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandler;
import io.netty.handler.codec.Delimiters;
import io.netty.handler.ssl.SslHandler;
import io.netty.util.CharsetUtil;
import org.apache.camel.LoggingLevel;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.util.EndpointHelper;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@UriParams
public class NettyConfiguration extends NettyServerBootstrapConfiguration implements Cloneable {
private static final Logger LOG = LoggerFactory.getLogger(NettyConfiguration.class);
@UriParam(label = "producer")
private long requestTimeout;
@UriParam(defaultValue = "true")
private boolean sync = true;
@UriParam
private boolean textline;
@UriParam(defaultValue = "LINE")
private TextLineDelimiter delimiter = TextLineDelimiter.LINE;
@UriParam(defaultValue = "true")
private boolean autoAppendDelimiter = true;
@UriParam(defaultValue = "1024")
private int decoderMaxLineLength = 1024;
@UriParam
private String encoding;
private List<ChannelHandler> encoders = new ArrayList<ChannelHandler>();
private List<ChannelHandler> decoders = new ArrayList<ChannelHandler>();
@UriParam
private boolean disconnect;
@UriParam(label = "producer,advanced", defaultValue = "true")
private boolean lazyChannelCreation = true;
@UriParam(label = "advanced")
private boolean transferExchange;
@UriParam(label = "consumer,advanced", defaultValue = "true")
private boolean disconnectOnNoReply = true;
@UriParam(label = "consumer,advanced", defaultValue = "WARN")
private LoggingLevel noReplyLogLevel = LoggingLevel.WARN;
@UriParam(label = "consumer,advanced", defaultValue = "WARN")
private LoggingLevel serverExceptionCaughtLogLevel = LoggingLevel.WARN;
@UriParam(label = "consumer,advanced", defaultValue = "DEBUG")
private LoggingLevel serverClosedChannelExceptionCaughtLogLevel = LoggingLevel.DEBUG;
@UriParam(defaultValue = "true")
private boolean allowDefaultCodec = true;
@UriParam(label = "producer,advanced")
private ClientInitializerFactory clientInitializerFactory;
@UriParam(label = "consumer,advanced", defaultValue = "16")
private int maximumPoolSize = 16;
@UriParam(label = "consumer,advanced", defaultValue = "true")
private boolean usingExecutorService = true;
@UriParam(label = "producer,advanced", defaultValue = "-1")
private int producerPoolMaxActive = -1;
@UriParam(label = "producer,advanced")
private int producerPoolMinIdle;
@UriParam(label = "producer,advanced", defaultValue = "100")
private int producerPoolMaxIdle = 100;
@UriParam(label = "producer,advanced", defaultValue = "" + 5 * 60 * 1000L)
private long producerPoolMinEvictableIdle = 5 * 60 * 1000L;
@UriParam(label = "producer,advanced", defaultValue = "true")
private boolean producerPoolEnabled = true;
@UriParam(label = "producer,advanced")
private boolean udpConnectionlessSending;
@UriParam(label = "consumer")
private boolean clientMode;
@UriParam(label = "producer,advanced")
private boolean useByteBuf;
@UriParam(label = "advanced")
private boolean udpByteArrayCodec;
/**
 * Returns a copy of this configuration. The clone shares no list instances with the
 * original: the encoder and decoder lists are re-created so the copy can be mutated
 * independently.
 */
public NettyConfiguration copy() {
    final NettyConfiguration copy;
    try {
        copy = (NettyConfiguration) clone();
    } catch (CloneNotSupportedException e) {
        throw new RuntimeCamelException(e);
    }
    // detach the handler lists from this instance
    copy.setEncoders(new ArrayList<ChannelHandler>(encoders));
    copy.setDecoders(new ArrayList<ChannelHandler>(decoders));
    return copy;
}
/**
 * Validates that each configured encoder, decoder and the optional sslHandler is safe to
 * reuse across channels: it must either be a {@link ChannelHandlerFactory} (a fresh handler
 * per channel) or be annotated {@code @ChannelHandler.Sharable}. Unsafe handlers are only
 * warned about, not rejected.
 */
public void validateConfiguration() {
    // validate that the encoders is either shareable or is a handler factory
    for (ChannelHandler encoder : encoders) {
        warnIfNotSafelyReusable(encoder, "encoder");
    }
    // validate that the decoders is either shareable or is a handler factory
    for (ChannelHandler decoder : decoders) {
        warnIfNotSafelyReusable(decoder, "decoder");
    }
    if (sslHandler != null) {
        warnIfNotSafelyReusable(sslHandler, "sslHandler");
    }
}

/**
 * Logs a warning when the handler is neither a ChannelHandlerFactory nor @Sharable.
 * The emitted message text is identical to the previous per-role copies.
 */
private void warnIfNotSafelyReusable(ChannelHandler handler, String role) {
    if (handler instanceof ChannelHandlerFactory) {
        return;
    }
    if (ObjectHelper.getAnnotation(handler, ChannelHandler.Sharable.class) != null) {
        return;
    }
    LOG.warn("The " + role + " {} is not @Shareable or an ChannelHandlerFactory instance. The " + role + " cannot safely be used.", handler);
}
/**
 * Populates this configuration from the endpoint URI and parameter map.
 * <p/>
 * Resolution order matters here: reference-style parameters (ssl, keystores,
 * pipeline/initializer factories, encoders/decoders) are extracted first, then the
 * remaining parameters are bound via the Camel type converters, then "option."
 * prefixed Netty options are extracted, and finally default codecs are installed
 * if none were configured.
 *
 * @param uri                the endpoint URI; its scheme must match one of supportedProtocols
 * @param parameters         mutable parameter map; consumed entries are removed from it
 * @param component          the Netty component used to resolve registry references
 * @param supportedProtocols the protocols accepted by the calling component (e.g. tcp, udp)
 * @throws IllegalArgumentException if the URI scheme is not a supported protocol
 * @throws Exception                if parameter resolution or binding fails
 */
public void parseURI(URI uri, Map<String, Object> parameters, NettyComponent component, String... supportedProtocols) throws Exception {
    protocol = uri.getScheme();
    // validate the scheme against the protocols this component supports
    boolean found = false;
    for (String supportedProtocol : supportedProtocols) {
        if (protocol != null && protocol.equalsIgnoreCase(supportedProtocol)) {
            found = true;
            break;
        }
    }
    if (!found) {
        throw new IllegalArgumentException("Unrecognized Netty protocol: " + protocol + " for uri: " + uri);
    }
    setHost(uri.getHost());
    setPort(uri.getPort());
    // SSL options; keyStoreFormat/securityProvider fall back to JKS/SunX509 when not set
    ssl = component.getAndRemoveOrResolveReferenceParameter(parameters, "ssl", boolean.class, false);
    sslHandler = component.getAndRemoveOrResolveReferenceParameter(parameters, "sslHandler", SslHandler.class, sslHandler);
    passphrase = component.getAndRemoveOrResolveReferenceParameter(parameters, "passphrase", String.class, passphrase);
    keyStoreFormat = component.getAndRemoveOrResolveReferenceParameter(parameters, "keyStoreFormat", String.class, keyStoreFormat == null ? "JKS" : keyStoreFormat);
    securityProvider = component.getAndRemoveOrResolveReferenceParameter(parameters, "securityProvider", String.class, securityProvider == null ? "SunX509" : securityProvider);
    keyStoreFile = component.getAndRemoveOrResolveReferenceParameter(parameters, "keyStoreFile", File.class, keyStoreFile);
    trustStoreFile = component.getAndRemoveOrResolveReferenceParameter(parameters, "trustStoreFile", File.class, trustStoreFile);
    keyStoreResource = component.getAndRemoveOrResolveReferenceParameter(parameters, "keyStoreResource", String.class, keyStoreResource);
    trustStoreResource = component.getAndRemoveOrResolveReferenceParameter(parameters, "trustStoreResource", String.class, trustStoreResource);
    // clientPipelineFactory is @deprecated and to be removed; the newer clientInitializerFactory
    // is resolved second so it wins if both are given
    clientInitializerFactory = component.getAndRemoveOrResolveReferenceParameter(parameters, "clientPipelineFactory", ClientInitializerFactory.class, clientInitializerFactory);
    clientInitializerFactory = component.getAndRemoveOrResolveReferenceParameter(parameters, "clientInitializerFactory", ClientInitializerFactory.class, clientInitializerFactory);
    // serverPipelineFactory is @deprecated and to be removed; same precedence as above
    serverInitializerFactory = component.getAndRemoveOrResolveReferenceParameter(parameters, "serverPipelineFactory", ServerInitializerFactory.class, serverInitializerFactory);
    serverInitializerFactory = component.getAndRemoveOrResolveReferenceParameter(parameters, "serverInitializerFactory", ServerInitializerFactory.class, serverInitializerFactory);
    // set custom encoders and decoders first
    List<ChannelHandler> referencedEncoders = component.resolveAndRemoveReferenceListParameter(parameters, "encoders", ChannelHandler.class, null);
    addToHandlersList(encoders, referencedEncoders, ChannelHandler.class);
    List<ChannelHandler> referencedDecoders = component.resolveAndRemoveReferenceListParameter(parameters, "decoders", ChannelHandler.class, null);
    addToHandlersList(decoders, referencedDecoders, ChannelHandler.class);
    // then set parameters with the help of the camel context type converters
    EndpointHelper.setReferenceProperties(component.getCamelContext(), this, parameters);
    EndpointHelper.setProperties(component.getCamelContext(), this, parameters);
    // additional netty options, we don't want to store an empty map, so set it as null if empty
    options = IntrospectionSupport.extractProperties(parameters, "option.");
    if (options != null && options.isEmpty()) {
        options = null;
    }
    // add default encoders and decoders only when the user configured none at all
    if (encoders.isEmpty() && decoders.isEmpty()) {
        if (isAllowDefaultCodec()) {
            if ("udp".equalsIgnoreCase(protocol)) {
                encoders.add(ChannelHandlerFactories.newDatagramPacketEncoder());
            }
            // are we textline or object?
            if (isTextline()) {
                Charset charset = getEncoding() != null ? Charset.forName(getEncoding()) : CharsetUtil.UTF_8;
                encoders.add(ChannelHandlerFactories.newStringEncoder(charset, protocol));
                ByteBuf[] delimiters = delimiter == TextLineDelimiter.LINE ? Delimiters.lineDelimiter() : Delimiters.nulDelimiter();
                decoders.add(ChannelHandlerFactories.newDelimiterBasedFrameDecoder(decoderMaxLineLength, delimiters, protocol));
                decoders.add(ChannelHandlerFactories.newStringDecoder(charset, protocol));
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Using textline encoders and decoders with charset: {}, delimiter: {} and decoderMaxLineLength: {}",
                            new Object[]{charset, delimiter, decoderMaxLineLength});
                }
            } else if ("udp".equalsIgnoreCase(protocol) && isUdpByteArrayCodec()) {
                encoders.add(ChannelHandlerFactories.newByteArrayEncoder(protocol));
                decoders.add(ChannelHandlerFactories.newByteArrayDecoder(protocol));
            } else {
                // object serializable is then used
                encoders.add(ChannelHandlerFactories.newObjectEncoder(protocol));
                decoders.add(ChannelHandlerFactories.newObjectDecoder(protocol));
                LOG.debug("Using object encoders and decoders");
            }
            if ("udp".equalsIgnoreCase(protocol)) {
                // the datagram decoder must come last so it wraps the payload decoders
                decoders.add(ChannelHandlerFactories.newDatagramPacketDecoder());
            }
        } else {
            LOG.debug("No encoders and decoders will be used");
        }
    } else {
        LOG.debug("Using configured encoders and/or decoders");
    }
}
/**
 * Returns the canonical name of the configured encoding, or <tt>null</tt> when no
 * encoding has been configured.
 *
 * @throws IllegalArgumentException if the configured encoding is not supported by this JVM
 */
public String getCharsetName() {
    String charset = encoding;
    if (charset == null) {
        return null;
    }
    if (!Charset.isSupported(charset)) {
        throw new IllegalArgumentException("The encoding: " + charset + " is not supported");
    }
    // normalize to the charset's canonical name
    return Charset.forName(charset).name();
}
/** See {@link #setRequestTimeout(long)}. */
public long getRequestTimeout() {
    return requestTimeout;
}

/**
 * Allows to use a timeout for the Netty producer when calling a remote server.
 * By default no timeout is in use. The value is in milli seconds, so eg 30000 is 30 seconds.
 * The requestTimeout is using Netty's ReadTimeoutHandler to trigger the timeout.
 */
public void setRequestTimeout(long requestTimeout) {
    this.requestTimeout = requestTimeout;
}

/** See {@link #setSync(boolean)}. */
public boolean isSync() {
    return sync;
}

/**
 * Setting to set endpoint as one-way or request-response
 */
public void setSync(boolean sync) {
    this.sync = sync;
}

/** See {@link #setTextline(boolean)}. */
public boolean isTextline() {
    return textline;
}

/**
 * Only used for TCP. If no codec is specified, you can use this flag to indicate a text line based codec;
 * if not specified or the value is false, then Object Serialization is assumed over TCP.
 */
public void setTextline(boolean textline) {
    this.textline = textline;
}
/** See {@link #setDecoderMaxLineLength(int)}. */
public int getDecoderMaxLineLength() {
    return decoderMaxLineLength;
}

/**
 * The max line length to use for the textline codec.
 */
public void setDecoderMaxLineLength(int decoderMaxLineLength) {
    this.decoderMaxLineLength = decoderMaxLineLength;
}

/** See {@link #setDelimiter(TextLineDelimiter)}. */
public TextLineDelimiter getDelimiter() {
    return delimiter;
}

/**
 * The delimiter to use for the textline codec. Possible values are LINE and NULL.
 */
public void setDelimiter(TextLineDelimiter delimiter) {
    this.delimiter = delimiter;
}

/** See {@link #setAutoAppendDelimiter(boolean)}. */
public boolean isAutoAppendDelimiter() {
    return autoAppendDelimiter;
}

/**
 * Whether or not to auto append missing end delimiter when sending using the textline codec.
 */
public void setAutoAppendDelimiter(boolean autoAppendDelimiter) {
    this.autoAppendDelimiter = autoAppendDelimiter;
}

/** See {@link #setEncoding(String)}. */
public String getEncoding() {
    return encoding;
}

/**
 * The encoding (a charset name) to use for the textline codec. If not provided, Camel will use the JVM default Charset.
 */
public void setEncoding(String encoding) {
    this.encoding = encoding;
}
/** See {@link #setDecoders(List)}. */
public List<ChannelHandler> getDecoders() {
    return decoders;
}

/**
 * A list of decoders to be used.
 * You can use a String which have values separated by comma, and have the values be looked up in the Registry.
 * Just remember to prefix the value with # so Camel knows it should lookup.
 */
public void setDecoders(List<ChannelHandler> decoders) {
    this.decoders = decoders;
}

/** See {@link #setEncoders(List)}. */
public List<ChannelHandler> getEncoders() {
    return encoders;
}

/**
 * A list of encoders to be used. You can use a String which have values separated by comma, and have the values be looked up in the Registry.
 * Just remember to prefix the value with # so Camel knows it should lookup.
 */
public void setEncoders(List<ChannelHandler> encoders) {
    this.encoders = encoders;
}

/** Returns the first configured encoder, or <tt>null</tt> if none are configured. */
public ChannelHandler getEncoder() {
    return encoders.isEmpty() ? null : encoders.get(0);
}

/**
 * A custom ChannelHandler class that can be used to perform special marshalling of outbound payloads.
 */
public void setEncoder(ChannelHandler encoder) {
    // appends rather than replaces; duplicates are ignored
    if (!encoders.contains(encoder)) {
        encoders.add(encoder);
    }
}

/** Returns the first configured decoder, or <tt>null</tt> if none are configured. */
public ChannelHandler getDecoder() {
    return decoders.isEmpty() ? null : decoders.get(0);
}

/**
 * A custom ChannelHandler class that can be used to perform special marshalling of inbound payloads.
 */
public void setDecoder(ChannelHandler decoder) {
    // appends rather than replaces; duplicates are ignored
    if (!decoders.contains(decoder)) {
        decoders.add(decoder);
    }
}
/** See {@link #setDisconnect(boolean)}. */
public boolean isDisconnect() {
    return disconnect;
}

/**
 * Whether or not to disconnect(close) from Netty Channel right after use. Can be used for both consumer and producer.
 */
public void setDisconnect(boolean disconnect) {
    this.disconnect = disconnect;
}

/** See {@link #setLazyChannelCreation(boolean)}. */
public boolean isLazyChannelCreation() {
    return lazyChannelCreation;
}

/**
 * Channels can be lazily created to avoid exceptions, if the remote server is not up and running when the Camel producer is started.
 */
public void setLazyChannelCreation(boolean lazyChannelCreation) {
    this.lazyChannelCreation = lazyChannelCreation;
}

/** See {@link #setTransferExchange(boolean)}. */
public boolean isTransferExchange() {
    return transferExchange;
}

/**
 * Only used for TCP. You can transfer the exchange over the wire instead of just the body.
 * The following fields are transferred: In body, Out body, fault body, In headers, Out headers, fault headers,
 * exchange properties, exchange exception.
 * This requires that the objects are serializable. Camel will exclude any non-serializable objects and log it at WARN level.
 */
public void setTransferExchange(boolean transferExchange) {
    this.transferExchange = transferExchange;
}

/** See {@link #setDisconnectOnNoReply(boolean)}. */
public boolean isDisconnectOnNoReply() {
    return disconnectOnNoReply;
}

/**
 * If sync is enabled then this option dictates NettyConsumer if it should disconnect where there is no reply to send back.
 */
public void setDisconnectOnNoReply(boolean disconnectOnNoReply) {
    this.disconnectOnNoReply = disconnectOnNoReply;
}

/** See {@link #setNoReplyLogLevel(LoggingLevel)}. */
public LoggingLevel getNoReplyLogLevel() {
    return noReplyLogLevel;
}

/**
 * If sync is enabled this option dictates NettyConsumer which logging level to use when logging that there is no reply to send back.
 */
public void setNoReplyLogLevel(LoggingLevel noReplyLogLevel) {
    this.noReplyLogLevel = noReplyLogLevel;
}
/** See {@link #setServerExceptionCaughtLogLevel(LoggingLevel)}. */
public LoggingLevel getServerExceptionCaughtLogLevel() {
    return serverExceptionCaughtLogLevel;
}

/**
 * If the server (NettyConsumer) catches an exception then its logged using this logging level.
 */
public void setServerExceptionCaughtLogLevel(LoggingLevel serverExceptionCaughtLogLevel) {
    this.serverExceptionCaughtLogLevel = serverExceptionCaughtLogLevel;
}

/** See {@link #setServerClosedChannelExceptionCaughtLogLevel(LoggingLevel)}. */
public LoggingLevel getServerClosedChannelExceptionCaughtLogLevel() {
    return serverClosedChannelExceptionCaughtLogLevel;
}

/**
 * If the server (NettyConsumer) catches an java.nio.channels.ClosedChannelException then its logged using this logging level.
 * This is used to avoid logging the closed channel exceptions, as clients can disconnect abruptly and then cause a flood of closed exceptions in the Netty server.
 */
public void setServerClosedChannelExceptionCaughtLogLevel(LoggingLevel serverClosedChannelExceptionCaughtLogLevel) {
    this.serverClosedChannelExceptionCaughtLogLevel = serverClosedChannelExceptionCaughtLogLevel;
}

/** See {@link #setAllowDefaultCodec(boolean)}. */
public boolean isAllowDefaultCodec() {
    return allowDefaultCodec;
}

/**
 * The netty component installs a default codec if both, encoder/decoder is null and textline is false.
 * Setting allowDefaultCodec to false prevents the netty component from installing a default codec as the first element in the filter chain.
 */
public void setAllowDefaultCodec(boolean allowDefaultCodec) {
    this.allowDefaultCodec = allowDefaultCodec;
}

/**
 * @deprecated use {@link #setClientInitializerFactory(ClientInitializerFactory)}
 */
@Deprecated
public void setClientPipelineFactory(ClientInitializerFactory clientPipelineFactory) {
    this.clientInitializerFactory = clientPipelineFactory;
}

/**
 * @deprecated use {@link #getClientInitializerFactory()}
 */
@Deprecated
public ClientInitializerFactory getClientPipelineFactory() {
    return clientInitializerFactory;
}

/** See {@link #setClientInitializerFactory(ClientInitializerFactory)}. */
public ClientInitializerFactory getClientInitializerFactory() {
    return clientInitializerFactory;
}

/**
 * To use a custom ClientInitializerFactory
 */
public void setClientInitializerFactory(ClientInitializerFactory clientInitializerFactory) {
    this.clientInitializerFactory = clientInitializerFactory;
}
/** See {@link #setMaximumPoolSize(int)}. */
public int getMaximumPoolSize() {
    return maximumPoolSize;
}

/**
 * The maximum pool size for the ordered thread pool, if its in use.
 */
public void setMaximumPoolSize(int maximumPoolSize) {
    this.maximumPoolSize = maximumPoolSize;
}

/** See {@link #setUsingExecutorService(boolean)}. */
public boolean isUsingExecutorService() {
    return usingExecutorService;
}

/**
 * Whether to use ordered thread pool, to ensure events are processed orderly on the same channel.
 */
public void setUsingExecutorService(boolean usingExecutorService) {
    this.usingExecutorService = usingExecutorService;
}

/** See {@link #setProducerPoolMaxActive(int)}. */
public int getProducerPoolMaxActive() {
    return producerPoolMaxActive;
}

/**
 * Sets the cap on the number of objects that can be allocated by the pool
 * (checked out to clients, or idle awaiting checkout) at a given time. Use a negative value for no limit.
 */
public void setProducerPoolMaxActive(int producerPoolMaxActive) {
    this.producerPoolMaxActive = producerPoolMaxActive;
}

/** See {@link #setProducerPoolMinIdle(int)}. */
public int getProducerPoolMinIdle() {
    return producerPoolMinIdle;
}

/**
 * Sets the minimum number of instances allowed in the producer pool before the evictor thread (if active) spawns new objects.
 */
public void setProducerPoolMinIdle(int producerPoolMinIdle) {
    this.producerPoolMinIdle = producerPoolMinIdle;
}

/** See {@link #setProducerPoolMaxIdle(int)}. */
public int getProducerPoolMaxIdle() {
    return producerPoolMaxIdle;
}

/**
 * Sets the cap on the number of "idle" instances in the pool.
 */
public void setProducerPoolMaxIdle(int producerPoolMaxIdle) {
    this.producerPoolMaxIdle = producerPoolMaxIdle;
}

/** See {@link #setProducerPoolMinEvictableIdle(long)}. */
public long getProducerPoolMinEvictableIdle() {
    return producerPoolMinEvictableIdle;
}

/**
 * Sets the minimum amount of time (value in millis) an object may sit idle in the pool before it is eligible for eviction by the idle object evictor.
 */
public void setProducerPoolMinEvictableIdle(long producerPoolMinEvictableIdle) {
    this.producerPoolMinEvictableIdle = producerPoolMinEvictableIdle;
}

/** See {@link #setProducerPoolEnabled(boolean)}. */
public boolean isProducerPoolEnabled() {
    return producerPoolEnabled;
}

/**
 * Whether producer pool is enabled or not.
 * Important: Do not turn this off, as the pooling is needed for handling concurrency and reliable request/reply.
 */
public void setProducerPoolEnabled(boolean producerPoolEnabled) {
    this.producerPoolEnabled = producerPoolEnabled;
}
/** See {@link #setUdpConnectionlessSending(boolean)}. */
public boolean isUdpConnectionlessSending() {
    return udpConnectionlessSending;
}

/**
 * This option supports connection less udp sending which is a real fire and forget.
 * A connected udp send receive the PortUnreachableException if no one is listen on the receiving port.
 */
public void setUdpConnectionlessSending(boolean udpConnectionlessSending) {
    this.udpConnectionlessSending = udpConnectionlessSending;
}

/** See {@link #setClientMode(boolean)}. */
public boolean isClientMode() {
    return clientMode;
}

/**
 * If the clientMode is true, netty consumer will connect the address as a TCP client.
 */
public void setClientMode(boolean clientMode) {
    this.clientMode = clientMode;
}

/** See {@link #setUseByteBuf(boolean)}. */
public boolean isUseByteBuf() {
    return useByteBuf;
}

/**
 * If the useByteBuf is true, netty producer will turn the message body into {@link ByteBuf} before sending it out.
 */
public void setUseByteBuf(boolean useByteBuf) {
    this.useByteBuf = useByteBuf;
}

/** See {@link #setUdpByteArrayCodec(boolean)}. */
public boolean isUdpByteArrayCodec() {
    return udpByteArrayCodec;
}

/**
 * For UDP only. If enabled the using byte array codec instead of Java serialization protocol.
 */
public void setUdpByteArrayCodec(boolean udpByteArrayCodec) {
    this.udpByteArrayCodec = udpByteArrayCodec;
}
/**
 * Appends every entry of <tt>handlers</tt> that is an instance of
 * <tt>handlerType</tt> to the <tt>configured</tt> list. Non-matching entries are
 * silently skipped, and a <tt>null</tt> handlers list is a no-op.
 */
private static <T> void addToHandlersList(List<T> configured, List<T> handlers, Class<T> handlerType) {
    if (handlers == null) {
        return;
    }
    for (T candidate : handlers) {
        if (handlerType.isInstance(candidate)) {
            configured.add(candidate);
        }
    }
}
}
| |
package org.jgroups.protocols;
import org.jgroups.Address;
import org.jgroups.Event;
import org.jgroups.PhysicalAddress;
import org.jgroups.View;
import org.jgroups.annotations.MBean;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.annotations.Property;
import org.jgroups.conf.AttributeType;
import org.jgroups.stack.IpAddress;
import org.jgroups.stack.Protocol;
import org.jgroups.util.*;
import java.net.InetAddress;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/**
 * Failure detection protocol which detects the crash or hanging of entire hosts and suspects all cluster members
 * on those hosts. By default InetAddress.isReachable() is used, but any script/command can be used for liveness checks
 * by defining the 'cmd' property.
 * <p/>
 * FD_HOST does <em>not</em> detect the crash or hanging of single members on the local host, but only checks liveness
 * of all other hosts in a cluster. Therefore it is meant to be used together with other failure detection protocols,
 * e.g. {@link org.jgroups.protocols.FD_ALL3} and {@link org.jgroups.protocols.FD_SOCK}.
 * <p/>
 * This protocol would typically be used when multiple cluster members are running on the same physical box.
 * <p/>
 * JIRA: https://issues.jboss.org/browse/JGRP-1855
 * @author Bela Ban
 * @version 3.5, 3.4.5
 */
@MBean(description="Failure detection protocol which detects crashes or hangs of entire hosts and suspects " +
  "all cluster members on those hosts")
public class FD_HOST extends Protocol {
    @Property(description="The command used to check a given host for liveness. Example: \"ping\". " +
      "If null, InetAddress.isReachable() will be used by default")
    protected String cmd;

    @Property(description="Max time (in ms) after which a host is suspected if it failed all liveness checks",
      type=AttributeType.TIME)
    protected long timeout=60000;

    @Property(description="The interval (in ms) at which the hosts are checked for liveness",type=AttributeType.TIME)
    protected long interval=20000;

    @Property(description="Max time (in ms) that a liveness check for a single host can take",type=AttributeType.TIME)
    protected long check_timeout=3000;

    @Property(description="Uses TimeService to get the current time rather than System.currentTimeMillis. Might get " +
      "removed soon, don't use !")
    protected boolean use_time_service=true;

    @ManagedAttribute(description="Number of liveness checks")
    protected int num_liveness_checks;

    @ManagedAttribute(description="Number of suspected events received")
    protected int num_suspect_events;

    // Members currently suspected. NOTE(review): mutated both under synchronized(this)
    // (suspect/unsuspect) and without it (handleView/init/stop) - confirm this is intended.
    protected final Set<Address> suspected_mbrs=new HashSet<>();

    @ManagedAttribute(description="Shows whether there are currently any suspected members")
    protected volatile boolean has_suspected_mbrs;

    // Bounded history of (host, wall-clock timestamp) suspicion events, for diagnostics
    protected final BoundedList<Tuple<InetAddress,Long>> suspect_history=new BoundedList<>(20);

    // The host this member runs on; set on CONNECT, cleared on DISCONNECT
    protected InetAddress local_host;

    protected final List<Address> members=new ArrayList<>();

    /** The command to detect whether a target is alive */
    protected PingCommand ping_command=new IsReachablePingCommand();

    /** Map of hosts and their cluster members, updated on view changes. Used to suspect all members
     of a suspected host */
    protected final Map<InetAddress,List<Address>> hosts=new HashMap<>();

    // Map of hosts and timestamps of last updates (ns)
    protected final ConcurrentMap<InetAddress, Long> timestamps=new ConcurrentHashMap<>();

    /** Timer used to run the ping task on */
    protected TimeScheduler timer;
    protected TimeService time_service;
    protected Future<?> ping_task_future;

    /** Sets a custom ping command (fluent). */
    public FD_HOST pingCommand(PingCommand cmd) {this.ping_command=cmd; return this;}

    public void resetStats() {
        num_suspect_events=num_liveness_checks=0;
        suspect_history.clear();
    }

    /** Sets the external liveness command; a null command falls back to InetAddress.isReachable(). */
    public FD_HOST setCommand(String command) {
        this.cmd=command;
        ping_command=this.cmd != null? new ExternalPingCommand(cmd) : new IsReachablePingCommand();
        return this;
    }

    @ManagedOperation(description="Prints history of suspected hosts")
    public String printSuspectHistory() {
        StringBuilder sb=new StringBuilder();
        for(Tuple<InetAddress,Long> tmp: suspect_history) {
            sb.append(new Date(tmp.getVal2())).append(": ").append(tmp.getVal1()).append("\n");
        }
        return sb.toString();
    }

    @ManagedOperation(description="Prints timestamps")
    public String printTimestamps() {
        return _printTimestamps();
    }

    @ManagedAttribute(description="Whether the ping task is running")
    public boolean isPingerRunning() {
        Future<?> future=ping_task_future;
        return future != null && !future.isDone();
    }

    @ManagedOperation(description="Prints the hosts and their associated cluster members")
    public String printHosts() {
        StringBuilder sb=new StringBuilder();
        synchronized(hosts) {
            for(Map.Entry<InetAddress,List<Address>> entry: hosts.entrySet()) {
                sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
            }
        }
        return sb.toString();
    }

    @ManagedOperation(description="Checks whether the given host is alive")
    public boolean isAlive(String host) throws Exception {
        return ping_command != null && ping_command.isAlive(InetAddress.getByName(host), check_timeout);
    }

    @ManagedAttribute(description="Currently suspected members")
    public String getSuspectedMembers() {return suspected_mbrs.toString();}

    public void init() throws Exception {
        // interval must be shorter than timeout, otherwise a host could never accumulate
        // enough missed checks to be suspected
        if(interval >= timeout)
            throw new IllegalArgumentException("interval (" + interval + ") has to be less than timeout (" + timeout + ")");
        super.init();
        if(cmd != null)
            ping_command=new ExternalPingCommand(cmd);
        timer=getTransport().getTimer();
        if(timer == null)
            throw new Exception("timer not set");
        time_service=getTransport().getTimeService();
        if(time_service == null)
            log.warn("%s: time service is not available, using System.currentTimeMillis() instead", local_addr);
        else {
            // a coarse time service would make age computations useless; fall back to System.nanoTime()
            if(time_service.interval() > timeout) {
                log.warn("%s: interval of time service (%d) is greater than timeout (%d), disabling time service",
                         local_addr, time_service.interval(), timeout);
                use_time_service=false;
            }
        }
        suspected_mbrs.clear();
        has_suspected_mbrs=false;
    }

    public void stop() {
        super.stop();
        stopPingerTask();
        suspected_mbrs.clear();
        has_suspected_mbrs=false;
    }

    public Object down(Event evt) {
        switch(evt.getType()) {
            case Event.VIEW_CHANGE:
                View view=evt.getArg();
                handleView(view);
                break;
            case Event.CONNECT:
            case Event.CONNECT_USE_FLUSH:
            case Event.CONNECT_WITH_STATE_TRANSFER:
            case Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH:
                // remember our own host so the ping task can exclude it
                local_host=getHostFor(local_addr);
                break;
            case Event.DISCONNECT:
                // pass the event down first, then forget the local host
                Object retval=down_prot.down(evt);
                local_host=null;
                return retval;
            case Event.UNSUSPECT:
                Address mbr=evt.getArg();
                unsuspect(mbr);
                break;
        }
        return down_prot.down(evt);
    }

    protected void handleView(View view) {
        List<Address> view_mbrs=view.getMembers();
        boolean is_pinger=false;
        members.clear();
        members.addAll(view_mbrs);
        Collection<InetAddress> current_hosts=null;
        synchronized(hosts) {
            // rebuild the host -> members mapping from the new view
            hosts.clear();
            for(Address mbr: view_mbrs) {
                InetAddress key=getHostFor(mbr);
                if(key == null)
                    continue;
                List<Address> mbrs=hosts.computeIfAbsent(key, k -> new ArrayList<>());
                mbrs.add(mbr);
            }
            is_pinger=isPinger(local_addr);
            current_hosts=new ArrayList<>(hosts.keySet());
        }
        // drop suspicions for members no longer in the view
        if(suspected_mbrs.retainAll(view.getMembers()))
            has_suspected_mbrs=!suspected_mbrs.isEmpty();
        // prune timestamps of departed hosts and seed timestamps for new ones (excluding ourselves)
        timestamps.keySet().retainAll(current_hosts);
        current_hosts.remove(local_host);
        for(InetAddress host: current_hosts)
            timestamps.putIfAbsent(host, getTimestamp());
        // only one member per host runs the pinger (the first member listed for the local host)
        if(is_pinger)
            startPingerTask();
        else {
            stopPingerTask();
            timestamps.clear();
        }
    }

    protected PhysicalAddress getPhysicalAddress(Address logical_addr) {
        return (PhysicalAddress)down(new Event(Event.GET_PHYSICAL_ADDRESS, logical_addr));
    }

    /** Resolves the physical host (InetAddress) for a member, or null if it cannot be resolved */
    protected InetAddress getHostFor(Address mbr) {
        PhysicalAddress phys_addr=getPhysicalAddress(mbr);
        return phys_addr instanceof IpAddress? ((IpAddress)phys_addr).getIpAddress() : null;
    }

    /** Whether mbr is the designated pinger for its host: the first member in the host's member list */
    protected boolean isPinger(Address mbr) {
        InetAddress host=getHostFor(mbr);
        if(host == null) return false; // should not happen
        List<Address> mbrs=hosts.get(host);
        return mbrs != null && !mbrs.isEmpty() && mbrs.get(0).equals(mbr);
    }

    protected void startPingerTask() {
        if(ping_task_future == null || ping_task_future.isDone())
            ping_task_future=timer.scheduleAtFixedRate(new PingTask(), interval, interval, TimeUnit.MILLISECONDS);
    }

    protected void stopPingerTask() {
        if(ping_task_future != null) {
            ping_task_future.cancel(false);
            ping_task_future=null;
        }
    }

    /** Called by ping task; will result in all members of host getting suspected */
    protected void suspect(InetAddress host) {
        List<Address> suspects;
        suspect_history.add(new Tuple<>(host, System.currentTimeMillis())); // we need wall clock time here
        synchronized(hosts) {
            List<Address> tmp=hosts.get(host);
            suspects=tmp != null? new ArrayList<>(tmp) : null;
        }
        if(suspects != null) {
            log.debug("%s: suspecting host %s; suspected members: %s", local_addr, host, Util.printListWithDelimiter(suspects, ","));
            suspect(suspects);
        }
    }

    protected void suspect(List<Address> suspects) {
        if(suspects == null || suspects.isEmpty())
            return;
        num_suspect_events+=suspects.size();
        final List<Address> eligible_mbrs;
        synchronized(this) {
            suspected_mbrs.addAll(suspects);
            eligible_mbrs=new ArrayList<>(members);
            eligible_mbrs.removeAll(suspected_mbrs);
            has_suspected_mbrs=!suspected_mbrs.isEmpty();
        }
        // Check if we're coord, then send up the stack
        if(local_addr != null && !eligible_mbrs.isEmpty() && local_addr.equals(eligible_mbrs.get(0))) {
            log.debug("%s: suspecting %s", local_addr, suspects);
            up_prot.up(new Event(Event.SUSPECT, suspects));
            down_prot.down(new Event(Event.SUSPECT, suspects));
        }
    }

    /** Removes mbr from the suspected set and emits UNSUSPECT events; returns true if mbr was suspected */
    protected boolean unsuspect(Address mbr) {
        if(mbr == null) return false;
        boolean do_unsuspect;
        synchronized(this) {
            do_unsuspect=!suspected_mbrs.isEmpty() && suspected_mbrs.remove(mbr);
            if(do_unsuspect)
                has_suspected_mbrs=!suspected_mbrs.isEmpty();
        }
        if(do_unsuspect) {
            up_prot.up(new Event(Event.UNSUSPECT, mbr));
            down_prot.down(new Event(Event.UNSUSPECT, mbr));
        }
        return do_unsuspect;
    }

    protected String _printTimestamps() {
        StringBuilder sb=new StringBuilder();
        long current_time=getTimestamp();
        for(Map.Entry<InetAddress,Long> entry: timestamps.entrySet()) {
            sb.append(entry.getKey()).append(": ");
            sb.append(TimeUnit.SECONDS.convert(current_time - entry.getValue(), TimeUnit.NANOSECONDS)).append(" secs old\n");
        }
        return sb.toString();
    }

    protected void updateTimestampFor(InetAddress host) {
        timestamps.put(host, getTimestamp());
    }

    /** Returns the age (in secs) of the given host */
    protected long getAgeOf(InetAddress host) {
        Long ts=timestamps.get(host);
        return ts != null? TimeUnit.SECONDS.convert(getTimestamp() - ts, TimeUnit.NANOSECONDS) : -1;
    }

    /** Returns the current time in ns, from the time service if enabled, else System.nanoTime() */
    protected long getTimestamp() {
        return use_time_service && time_service != null? time_service.timestamp() : System.nanoTime();
    }

    /** Selected members run this task periodically. The task pings all hosts except self using ping_command.
     * When a host is not seen as alive, all members associated with that host are suspected */
    protected class PingTask implements Runnable {
        public void run() {
            List<InetAddress> targets;
            synchronized(hosts) {
                targets=new ArrayList<>(hosts.keySet());
            }
            targets.remove(local_host);
            for(InetAddress target: targets) {
                try {
                    // Ping each host
                    boolean is_alive=ping_command.isAlive(target, check_timeout);
                    num_liveness_checks++;
                    if(is_alive)
                        updateTimestampFor(target); // skip the timestamp check, as this host is alive
                    else {
                        log.trace("%s: %s is not alive (age=%d secs)",local_addr,target,getAgeOf(target));
                        // Check timestamp - we didn't get a response to the liveness check
                        long current_time=getTimestamp();
                        // A concurrent view change may have pruned this host from 'timestamps'
                        // (handleView calls retainAll) after we snapshotted 'targets'. The old code
                        // auto-unboxed the lookup and threw an NPE (mislogged as a ping failure);
                        // skip the vanished host instead.
                        Long last_update=timestamps.get(target);
                        if(last_update == null)
                            continue;
                        long diff=TimeUnit.MILLISECONDS.convert(current_time - last_update,TimeUnit.NANOSECONDS);
                        if(diff >= timeout)
                            suspect(target);
                    }
                }
                catch(Exception e) {
                    log.error(local_addr + ": ping command failed", e);
                }
            }
        }
    }

    /** Command used to check whether a given host is alive, periodically called */
    public interface PingCommand {
        /**
         * Checks whether a given host is alive
         * @param host The host to be checked for liveness
         * @param timeout Number of milliseconds to wait for the check to complete
         * @return true if the host is alive, else false
         */
        boolean isAlive(InetAddress host, long timeout) throws Exception;
    }

    /** Default liveness check: ICMP/echo via InetAddress.isReachable() */
    public static class IsReachablePingCommand implements PingCommand {
        public boolean isAlive(InetAddress host, long timeout) throws Exception {
            return host.isReachable((int)timeout);
        }
    }

    /** Liveness check that runs an external command ("<cmd> <host-address>") and treats exit code 0 as alive */
    protected static class ExternalPingCommand implements PingCommand {
        protected final String cmd;

        public ExternalPingCommand(String cmd) {
            this.cmd=cmd;
        }

        public boolean isAlive(InetAddress host, long timeout) throws Exception {
            // NOTE(review): the configured 'timeout' is not applied here; the external
            // command runs until it exits on its own - confirm whether that is intended
            return CommandExecutor2.execute(cmd + " " + host.getHostAddress()) == 0;
        }
    }

    public static class CommandExecutor2 {
        public static int execute(String command) throws Exception {
            // NOTE(review): Runtime.exec(String) tokenizes naively and the process output is
            // never drained, so a command producing a lot of output could fill the pipe buffer
            // and block waitFor(); consider ProcessBuilder with output redirection/draining.
            Process p=Runtime.getRuntime().exec(command);
            return p.waitFor();
        }
    }
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.java.HasJavaAbi;
import com.facebook.buck.java.JavacOptions;
import com.facebook.buck.java.JavacStep;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.HasBuildTarget;
import com.facebook.buck.rules.AbiRule;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildOutputInitializer;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.InitializableFromDisk;
import com.facebook.buck.rules.OnDiskBuildInfo;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.Sha1HashCode;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.step.AbstractExecutionStep;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.WriteFileStep;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.nio.file.Path;
import java.util.Set;
import javax.annotation.Nullable;
/**
* Buildable that takes in a list of {@link HasAndroidResourceDeps} and for each of these rules,
* first creates an {@code R.java} file using {@link MergeAndroidResourcesStep} and compiles it to
* generate a corresponding {@code R.class} file. These are called "dummy" {@code R.java} files
* since these are later merged together into a single {@code R.java} file by {@link AaptStep}.
*/
public class DummyRDotJava extends AbstractBuildRule
implements AbiRule, HasJavaAbi, InitializableFromDisk<DummyRDotJava.BuildOutput> {
// Resource deps, kept sorted by build target so the computed ABI is stable (see constructor).
private final ImmutableList<HasAndroidResourceDeps> androidResourceDeps;
// Compiler options used by the JavacStep that compiles the generated R.java sources.
private final JavacOptions javacOptions;
// Initializes BuildOutput from on-disk build metadata (InitializableFromDisk contract).
private final BuildOutputInitializer<BuildOutput> buildOutputInitializer;
// Metadata key under which the ABI key is recorded; presumably read back via OnDiskBuildInfo.
@VisibleForTesting
static final String METADATA_KEY_FOR_ABI_KEY = "DUMMY_R_DOT_JAVA_ABI_KEY";
/**
 * @param params              standard build rule parameters; the build target is also used to key
 *                            the {@link BuildOutputInitializer}
 * @param androidResourceDeps resource rules whose {@code R.java} files this rule generates and
 *                            compiles; stored sorted by build target
 * @param javacOptions        options for compiling the generated sources
 */
public DummyRDotJava(
    BuildRuleParams params,
    Set<HasAndroidResourceDeps> androidResourceDeps,
    JavacOptions javacOptions) {
  super(params);
  // Sort the input so that we get a stable ABI for the same set of resources.
  this.androidResourceDeps = FluentIterable.from(androidResourceDeps)
      .toSortedList(HasBuildTarget.BUILD_TARGET_COMPARATOR);
  this.javacOptions = Preconditions.checkNotNull(javacOptions);
  this.buildOutputInitializer = new BuildOutputInitializer<>(params.getBuildTarget(), this);
}
@Override
public ImmutableCollection<Path> getInputsToCompareToOutput() {
return ImmutableSet.of();
}
@Override
public ImmutableList<Step> getBuildSteps(
BuildContext context,
final BuildableContext buildableContext) {
ImmutableList.Builder<Step> steps = ImmutableList.builder();
final Path rDotJavaSrcFolder = getRDotJavaSrcFolder(getBuildTarget());
steps.add(new MakeCleanDirectoryStep(rDotJavaSrcFolder));
// Generate the .java files and record where they will be written in javaSourceFilePaths.
Set<SourcePath> javaSourceFilePaths;
if (androidResourceDeps.isEmpty()) {
// In this case, the user is likely running a Robolectric test that does not happen to
// depend on any resources. However, if Robolectric doesn't find an R.java file, it flips
// out, so we have to create one, anyway.
// TODO(mbolin): Stop hardcoding com.facebook. This should match the package in the
// associated TestAndroidManifest.xml file.
Path emptyRDotJava = rDotJavaSrcFolder.resolve("com/facebook/R.java");
steps.add(new MakeCleanDirectoryStep(emptyRDotJava.getParent()));
steps.add(new WriteFileStep(
"package com.facebook;\n public class R {}\n",
emptyRDotJava));
javaSourceFilePaths = ImmutableSet.<SourcePath>of(new PathSourcePath(emptyRDotJava));
} else {
MergeAndroidResourcesStep mergeStep = new MergeAndroidResourcesStep(
androidResourceDeps,
/* uberRDotTxt */ Optional.<Path>absent(),
rDotJavaSrcFolder);
steps.add(mergeStep);
javaSourceFilePaths = mergeStep.getRDotJavaFiles();
}
// Clear out the directory where the .class files will be generated.
final Path rDotJavaClassesFolder = getRDotJavaBinFolder();
steps.add(new MakeCleanDirectoryStep(rDotJavaClassesFolder));
Path pathToAbiOutputDir = getPathToAbiOutputDir(getBuildTarget());
steps.add(new MakeCleanDirectoryStep(pathToAbiOutputDir));
Path pathToAbiOutputFile = pathToAbiOutputDir.resolve("abi");
// Compile the .java files.
final JavacStep javacStep =
RDotJava.createJavacStepForDummyRDotJavaFiles(
javaSourceFilePaths,
rDotJavaClassesFolder,
Optional.of(pathToAbiOutputFile),
javacOptions,
getBuildTarget());
steps.add(javacStep);
steps.add(new AbstractExecutionStep("record_abi_key") {
@Override
public int execute(ExecutionContext context) {
Sha1HashCode abiKey = javacStep.getAbiKey();
Preconditions.checkNotNull(abiKey,
"Javac step must create a non-null ABI key for this rule.");
buildableContext.addMetadata(METADATA_KEY_FOR_ABI_KEY, abiKey.getHash());
return 0;
}
});
buildableContext.recordArtifactsInDirectory(rDotJavaClassesFolder);
return steps.build();
}
@Override
public Sha1HashCode getAbiKeyForDeps() {
return HasAndroidResourceDeps.ABI_HASHER.apply(androidResourceDeps);
}
@Override
public RuleKey.Builder appendDetailsToRuleKey(RuleKey.Builder builder) {
return javacOptions.appendToRuleKey(builder);
}
private static Path getRDotJavaSrcFolder(BuildTarget buildTarget) {
return BuildTargets.getBinPath(buildTarget, "__%s_rdotjava_src__");
}
private static Path getRDotJavaBinFolder(BuildTarget buildTarget) {
return BuildTargets.getBinPath(buildTarget, "__%s_rdotjava_bin__");
}
private static Path getPathToAbiOutputDir(BuildTarget buildTarget) {
return BuildTargets.getGenPath(buildTarget, "__%s_dummyrdotjava_abi__");
}
@Nullable
@Override
public Path getPathToOutputFile() {
return null;
}
public Path getRDotJavaBinFolder() {
return getRDotJavaBinFolder(getBuildTarget());
}
public ImmutableList<HasAndroidResourceDeps> getAndroidResourceDeps() {
return androidResourceDeps;
}
@VisibleForTesting
JavacOptions getJavacOptions() {
return javacOptions;
}
@Override
public BuildOutput initializeFromDisk(OnDiskBuildInfo onDiskBuildInfo) {
Optional<Sha1HashCode> abiKey = onDiskBuildInfo.getHash(METADATA_KEY_FOR_ABI_KEY);
if (!abiKey.isPresent()) {
throw new IllegalStateException(String.format(
"Should not be initializing %s from disk if ABI key is not written",
this));
}
return new BuildOutput(abiKey.get());
}
@Override
public BuildOutputInitializer<BuildOutput> getBuildOutputInitializer() {
return buildOutputInitializer;
}
@Override
public Sha1HashCode getAbiKey() {
return buildOutputInitializer.getBuildOutput().rDotTxtSha1;
}
public static class BuildOutput {
@VisibleForTesting
final Sha1HashCode rDotTxtSha1;
public BuildOutput(Sha1HashCode rDotTxtSha1) {
this.rDotTxtSha1 = Preconditions.checkNotNull(rDotTxtSha1);
}
}
}
| |
/*******************************************************************************
* Copyright 2014 Rafael Garcia Moreno.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.bladecoder.engineeditor.scneditor;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.text.MessageFormat;
import java.util.List;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.GlyphLayout;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureAtlas.AtlasRegion;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.graphics.glutils.HdpiUtils;
import com.badlogic.gdx.math.Polygon;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.ui.Widget;
import com.badlogic.gdx.scenes.scene2d.utils.Drawable;
import com.badlogic.gdx.scenes.scene2d.utils.ScissorStack;
import com.badlogic.gdx.scenes.scene2d.utils.TiledDrawable;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Timer;
import com.badlogic.gdx.utils.Timer.Task;
import com.bladecoder.engine.anim.AnimationDesc;
import com.bladecoder.engine.anim.Tween;
import com.bladecoder.engine.assets.EngineAssetManager;
import com.bladecoder.engine.model.AnchorActor;
import com.bladecoder.engine.model.AnimationRenderer;
import com.bladecoder.engine.model.BaseActor;
import com.bladecoder.engine.model.InteractiveActor;
import com.bladecoder.engine.model.Scene;
import com.bladecoder.engine.model.SceneLayer;
import com.bladecoder.engine.model.SpriteActor;
import com.bladecoder.engine.spine.SpineRenderer;
import com.bladecoder.engine.util.RectangleRenderer;
import com.bladecoder.engineeditor.Ctx;
import com.bladecoder.engineeditor.common.EditorLogger;
import com.bladecoder.engineeditor.common.Message;
import com.bladecoder.engineeditor.model.Project;
import com.bladecoder.engineeditor.scneditor.ScnWidgetInputListener.DraggingModes;
/**
 * Scene-editing widget: renders the selected {@link Scene} through its own world camera,
 * draws editor overlays (bounding boxes, transform icons, depth markers, cursor coords),
 * and reacts to project events (scene/actor/animation selection, project load).
 */
public class ScnWidget extends Widget {
    private static final Color BLACK_TRANSPARENT = new Color(0f, 0f, 0f, 0.5f);

    // TMPs to avoid GC calls
    private final Vector3 tmpV3 = new Vector3();
    private final Vector2 tmpV2 = new Vector2();
    private final Vector2 tmp2V2 = new Vector2();
    private final Vector3 tmpV3Draw = new Vector3();
    private final Vector2 tmpV2Transform = new Vector2();

    // Batch used for world-space rendering (the stage batch is used for screen-space overlays).
    private final SpriteBatch sceneBatch = new SpriteBatch();
    private final CanvasDrawer drawer = new CanvasDrawer();
    private final AnimationDrawer faRenderer = new AnimationDrawer();
    private final ScnWidgetInputListener inputListner = new ScnWidgetInputListener(this);
    private final Rectangle bounds = new Rectangle();
    private final Rectangle scissors = new Rectangle();

    private Scene scn;                        // currently displayed scene (null = none loaded)
    private BaseActor selectedActor = null;
    private boolean inScene = false;          // view animations "in scene" vs. in the side renderer
    private boolean animation = true;         // whether actors are updated/animated in the editor

    // Discrete zoom steps (percent); zoomLevel indexes into this conceptually.
    private static final int[] zoomLevels = { 5, 10, 16, 25, 33, 50, 66, 100, 150, 200, 300, 400, 600, 800, 1000 };
    private int zoomLevel = 100;

    private BitmapFont bigFont;
    private BitmapFont defaultFont;
    private TiledDrawable tile;               // checkerboard shown behind the scene
    private Drawable background;              // shown when no scene is displayed

    private boolean loading = false;
    private boolean loadingError = false;
    private boolean showSpriteBounds = true;

    private final GlyphLayout textLayout = new GlyphLayout();
    private final OrthographicCamera camera = new OrthographicCamera();

    private final TextureRegion scnMoveIcon;
    private final TextureRegion scnRotateIcon;
    private final TextureRegion scnScaleLockIcon;
    private final TextureRegion scnScaleIcon;

    /**
     * The NOTIFY_PROJECT_LOADED listener is called from other thread. This flag is
     * to recreate the scene in the OpenGL thread.
     */
    private boolean projectLoadedFlag = false;

    public ScnWidget(Skin skin) {
        bigFont = skin.get("big-font", BitmapFont.class);
        defaultFont = skin.get("default-font", BitmapFont.class);
        setSize(150, 150);
        tile = new TiledDrawable(Ctx.assetManager.getIcon("transparent-light"));
        background = skin.getDrawable("background");
        faRenderer.setViewport(getWidth(), getHeight());
        setLayoutEnabled(true);
        addListener(inputListner);

        // Mirror project selection events into this widget. While a project load is pending
        // (projectLoadedFlag), selection events are ignored; act() will resync everything
        // on the GL thread.
        Ctx.project.addPropertyChangeListener(new PropertyChangeListener() {
            @Override
            public void propertyChange(PropertyChangeEvent e) {
                EditorLogger.debug("ScnWidget Listener: " + e.getPropertyName());
                if (e.getPropertyName().equals(Project.NOTIFY_SCENE_SELECTED)) {
                    if (!projectLoadedFlag)
                        setSelectedScene(Ctx.project.getSelectedScene());
                } else if (e.getPropertyName().equals(Project.NOTIFY_ACTOR_SELECTED)) {
                    if (!projectLoadedFlag)
                        setSelectedActor(Ctx.project.getSelectedActor());
                } else if (e.getPropertyName().equals(Project.NOTIFY_ANIM_SELECTED)) {
                    if (!projectLoadedFlag && Ctx.project.getSelectedFA() != null)
                        setSelectedFA(Ctx.project.getSelectedFA());
                } else if (e.getPropertyName().equals(Project.NOTIFY_PROJECT_LOADED)) {
                    projectLoadedFlag = true;
                } else if (e.getPropertyName().equals("scene")) {
                    setSelectedScene(Ctx.project.getSelectedScene());
                    setSelectedActor(Ctx.project.getSelectedActor());
                } else if (e.getPropertyName().equals("init_animation")) {
                    if (!inScene)
                        setSelectedFA(null);
                }
            }
        });

        // Restore view toggles from the editor config.
        showSpriteBounds = Boolean
                .parseBoolean(Ctx.project.getEditorConfig().getProperty("view.showSpriteBounds", "true"));
        inScene = Boolean.parseBoolean(Ctx.project.getEditorConfig().getProperty("view.inScene", "false"));
        animation = Boolean.parseBoolean(Ctx.project.getEditorConfig().getProperty("view.animation", "true"));

        scnMoveIcon = Ctx.assetManager.getIcon("scn_move");
        scnRotateIcon = Ctx.assetManager.getIcon("scn_rotate");
        scnScaleLockIcon = Ctx.assetManager.getIcon("scn_scale_lock");
        scnScaleIcon = Ctx.assetManager.getIcon("scn_scale");
    }

    public OrthographicCamera getCamera() {
        return camera;
    }

    @Override
    public void act(float delta) {
        // Deferred project-load handling: runs on the GL thread (see projectLoadedFlag javadoc).
        if (projectLoadedFlag) {
            projectLoadedFlag = false;
            if (scn != null) {
                scn.dispose();
                scn = null;
            }
            setSelectedScene(Ctx.project.getSelectedScene());
            setSelectedActor(Ctx.project.getSelectedActor());
            setSelectedFA(Ctx.project.getSelectedFA());
        }

        if (scn != null && !loading && !loadingError) {
            if (!inScene)
                faRenderer.update(delta);
            // scn.update(delta);
            for (SceneLayer layer : scn.getLayers())
                layer.update();
            if (animation) {
                // Force-update every actor even if hidden, restoring its visibility afterwards,
                // so hidden actors keep animating in the editor.
                for (BaseActor a : scn.getActors().values()) {
                    boolean v = a.isVisible();
                    a.setVisible(true);
                    a.update(delta);
                    a.setVisible(v);
                }
            }
            handleKeyPositioning();
        }
    }

    /** Nudges the selected actor by one world unit with the arrow keys (when focused). */
    private void handleKeyPositioning() {
        if (getStage() == null || getStage().getKeyboardFocus() != this)
            return;
        if (Gdx.input.isKeyPressed(Keys.UP) || Gdx.input.isKeyPressed(Keys.DOWN) || Gdx.input.isKeyPressed(Keys.LEFT)
                || Gdx.input.isKeyPressed(Keys.RIGHT)) {
            BaseActor selActor = getSelectedActor();
            if (selActor == null)
                return;
            if (Gdx.input.isKeyPressed(Keys.UP))
                // p.translate(0, 1);
                selActor.setPosition(selActor.getX(), selActor.getY() + 1);
            else if (Gdx.input.isKeyPressed(Keys.DOWN))
                // p.translate(0, -1);
                selActor.setPosition(selActor.getX(), selActor.getY() - 1);
            else if (Gdx.input.isKeyPressed(Keys.LEFT))
                // p.translate(-1, 0);
                selActor.setPosition(selActor.getX() - 1, selActor.getY());
            else if (Gdx.input.isKeyPressed(Keys.RIGHT))
                // p.translate(1, 0);
                selActor.setPosition(selActor.getX() + 1, selActor.getY());
        }
    }

    @Override
    public void draw(Batch batch, float parentAlpha) {
        validate();

        float tmp = batch.getPackedColor();
        batch.setColor(Color.WHITE);
        // NOTE(review): the packed color is restored (setPackedColor(tmp)) only in the
        // scene branch below, not in the else branch — confirm whether that is intended.

        if (scn != null && !loading && !loadingError) {
            // BACKGROUND (checkerboard), drawn with blending off for speed.
            batch.disableBlending();
            tile.draw(batch, getX(), getY(), getWidth(), getHeight());
            batch.enableBlending();

            // Project the widget origin to window coords to set the GL viewport for the scene.
            Vector3 v = tmpV3Draw.set(getX(), getY(), 0);
            v = v.prj(batch.getTransformMatrix());
            batch.end();
            HdpiUtils.glViewport((int) v.x, (int) v.y, (int) getWidth(), (int) (getHeight()));
            getStage().calculateScissors(bounds, scissors);
            if (ScissorStack.pushScissors(scissors)) {
                // WORLD CAMERA
                sceneBatch.setProjectionMatrix(camera.combined);
                sceneBatch.begin();
                // Background image: horizontally tiled atlas regions starting at x=0.
                Array<AtlasRegion> scnBackground = scn.getBackground();
                if (scnBackground != null) {
                    sceneBatch.disableBlending();
                    float x = 0;
                    for (AtlasRegion tile : scnBackground) {
                        sceneBatch.draw(tile, x, 0f);
                        x += tile.getRegionWidth();
                    }
                    sceneBatch.enableBlending();
                }
                // draw layers from bottom to top
                List<SceneLayer> layers = scn.getLayers();
                for (int i = layers.size() - 1; i >= 0; i--) {
                    SceneLayer layer = layers.get(i);
                    if (!layer.isVisible())
                        continue;
                    List<InteractiveActor> actors = layer.getActors();
                    for (InteractiveActor a : actors) {
                        if (a instanceof SpriteActor && Ctx.project.isEditorVisible(a)) {
                            // Temporarily force visibility so hidden actors still render in the editor.
                            boolean visibility = a.isVisible();
                            a.setVisible(true);
                            ((SpriteActor) a).draw(sceneBatch);
                            a.setVisible(visibility);
                        }
                    }
                }
                sceneBatch.end();
                ScissorStack.popScissors();
            }

            // Editor overlays drawn in world space by the canvas drawer.
            drawer.drawBGBounds();
            drawer.drawBBoxActors(scn, showSpriteBounds);
            if (selectedActor != null) {
                drawer.drawSelectedActor(selectedActor);
            }

            getStage().getViewport().apply();
            // SCREEN CAMERA
            batch.begin();
            drawFakeDepthMarkers((SpriteBatch) batch);
            if (!inScene) {
                faRenderer.draw((SpriteBatch) batch);
            }
            // DRAW selected actor ICONS
            if (selectedActor != null) {
                drawTransformIcons((SpriteBatch) batch, selectedActor);
            }
            // DRAW COORDS (world coords of the mouse cursor, top-left corner).
            Vector2 coords = tmpV2Transform.set(Gdx.input.getX(), Gdx.input.getY());
            screenToWorldCoords(coords);
            String str = MessageFormat.format("({0}, {1})", (int) coords.x, (int) coords.y);
            textLayout.setText(defaultFont, str);
            RectangleRenderer.draw(batch, 0f, getY() + getHeight() - textLayout.height - 15, textLayout.width + 10,
                    textLayout.height + 10, BLACK_TRANSPARENT);
            defaultFont.draw(batch, textLayout, 5, getHeight() + getY() - 10);
            batch.setPackedColor(tmp);
        } else {
            // No displayable scene: draw the plain background and a status message.
            background.draw(batch, getX(), getY(), getWidth(), getHeight());
            String s;
            if (loading) {
                s = "LOADING...";
                // Finish asset loading on the next frame (posted so this frame still shows the
                // LOADING message); flips loading/loadingError accordingly.
                Timer.post(new Task() {
                    @Override
                    public void run() {
                        loading = false;
                        try {
                            EngineAssetManager.getInstance().finishLoading();
                            scn.retrieveAssets();
                            // disable Spine events
                            for (BaseActor a : scn.getActors().values()) {
                                if (a instanceof SpriteActor
                                        && ((SpriteActor) a).getRenderer() instanceof SpineRenderer) {
                                    ((SpineRenderer) ((SpriteActor) a).getRenderer()).enableEvents(false);
                                }
                            }
                            drawer.setCamera(camera);
                            invalidate();
                        } catch (Exception e) {
                            Message.showMsg(getStage(), "Could not load assets for scene", 4);
                            EditorLogger.printStackTrace(e);
                            loadingError = true;
                            loading = false;
                        }
                    }
                });
            } else if (loadingError) {
                s = "ERROR IN SCENE DATA. CANNOT DISPLAY SCENE";
            } else if (!Ctx.project.isLoaded()) {
                s = "CREATE OR LOAD A PROJECT";
            } else {
                s = "THERE ARE NO SCENES IN THIS CHAPTER YET";
            }
            textLayout.setText(bigFont, s);
            bigFont.draw(batch, textLayout, (getWidth() - textLayout.width) / 2,
                    getHeight() / 2 + bigFont.getLineHeight() * 3);
        }
    }

    /**
     * Draws the move/rotate/scale handle icons around the selected actor's bounding box
     * (skipped for anchors and actors on hidden layers).
     */
    private void drawTransformIcons(SpriteBatch batch, BaseActor a) {
        Polygon p = a.getBBox();
        if (!(a instanceof AnchorActor)) {
            if (a instanceof InteractiveActor) {
                InteractiveActor ia = (InteractiveActor) a;
                if (!scn.getLayer(ia.getLayer()).isVisible())
                    return;
            }
            // (x, y) and (x2, y2) are the screen-space corners of the bbox.
            Rectangle r = p.getBoundingRectangle();
            worldToScreenCoords(tmpV2Transform.set(r.x, r.y));
            float x = tmpV2Transform.x;
            float y = tmpV2Transform.y;
            worldToScreenCoords(tmpV2Transform.set(r.x + r.width, r.y + r.height));
            float x2 = tmpV2Transform.x;
            float y2 = tmpV2Transform.y;
            batch.draw(scnMoveIcon, x + (x2 - x - scnMoveIcon.getRegionWidth()) / 2, y2);
            if (a instanceof SpriteActor) {
                batch.draw(scnRotateIcon, x2 - scnRotateIcon.getRegionWidth() / 3,
                        y2 - scnRotateIcon.getRegionHeight() / 3);
                // Scale handles only make sense without fake depth (depth drives scale).
                if (!((SpriteActor) a).getFakeDepth()) {
                    batch.draw(scnScaleLockIcon, x - scnScaleLockIcon.getRegionWidth(), y2);
                    batch.draw(scnScaleIcon, x - scnScaleIcon.getRegionWidth(), y - scnScaleIcon.getRegionHeight());
                }
            }
        }
    }

    /**
     * Returns true when the world point (px, py) falls inside the handle icon that
     * corresponds to the given dragging mode. Icon placement mirrors drawTransformIcons().
     *
     * NOTE(review): r2 remains null for any DraggingModes value other than the four handled
     * here, which would NPE at r2.contains() — presumably callers only pass these modes;
     * confirm before relying on it.
     */
    public boolean inTransformIcon(float px, float py, DraggingModes dm) {
        Polygon p = selectedActor.getBBox();
        Rectangle r = p.getBoundingRectangle();
        worldToScreenCoords(tmpV2Transform.set(r.x, r.y));
        float x = tmpV2Transform.x;
        float y = tmpV2Transform.y;
        worldToScreenCoords(tmpV2Transform.set(r.x + r.width, r.y + r.height));
        float x2 = tmpV2Transform.x;
        float y2 = tmpV2Transform.y;

        Rectangle r2 = null;
        if (dm == DraggingModes.ROTATE_ACTOR) {
            r2 = new Rectangle(x2 - scnRotateIcon.getRegionWidth() / 3, y2 - scnRotateIcon.getRegionHeight() / 3,
                    scnRotateIcon.getRegionWidth(), scnRotateIcon.getRegionHeight());
        } else if (dm == DraggingModes.SCALE_ACTOR) {
            r2 = new Rectangle(x - scnScaleIcon.getRegionWidth(), y - scnScaleIcon.getRegionHeight(),
                    scnScaleIcon.getRegionWidth(), scnScaleIcon.getRegionHeight());
        } else if (dm == DraggingModes.SCALE_LOCK_ACTOR) {
            r2 = new Rectangle(x - scnScaleLockIcon.getRegionWidth(), y2, scnScaleLockIcon.getRegionWidth(),
                    scnScaleLockIcon.getRegionHeight());
        } else if (dm == DraggingModes.DRAGGING_ACTOR) {
            r2 = new Rectangle(x + (x2 - x - scnMoveIcon.getRegionWidth()) / 2, y2, scnMoveIcon.getRegionWidth(),
                    scnMoveIcon.getRegionHeight());
        }

        worldToScreenCoords(tmpV2Transform.set(px, py));
        return r2.contains(tmpV2Transform.x, tmpV2Transform.y);
    }

    /**
     * Draws the "0%"/"100%" fake-depth rulers at the scene's depth-vector heights.
     * Does nothing if the scene has no depth vector.
     */
    private void drawFakeDepthMarkers(SpriteBatch batch) {
        int margin = 5;
        Vector2 d = scn.getDepthVector();
        if (d == null)
            return;

        // 100% marker at world height d.y.
        tmp2V2.x = 0;
        tmp2V2.y = d.y;
        worldToScreenCoords(tmp2V2);
        String s = "100%";
        textLayout.setText(defaultFont, s);
        float posx = tmp2V2.x - textLayout.width - 20;
        RectangleRenderer.draw(batch, posx, tmp2V2.y, textLayout.width + margin * 2, textLayout.height + margin * 2,
                Color.BLACK);
        RectangleRenderer.draw(batch, tmp2V2.x - 20, tmp2V2.y, 20, 2, Color.BLACK);
        defaultFont.draw(batch, textLayout, posx + margin, tmp2V2.y + textLayout.height + margin);

        // 0% marker at world height d.x.
        tmp2V2.x = 0;
        tmp2V2.y = d.x;
        worldToScreenCoords(tmp2V2);
        s = "0%";
        textLayout.setText(defaultFont, s);
        posx = tmp2V2.x - textLayout.width - 20;
        RectangleRenderer.draw(batch, posx, tmp2V2.y, textLayout.width + margin * 2, textLayout.height + margin * 2,
                Color.BLACK);
        RectangleRenderer.draw(batch, tmp2V2.x - 20, tmp2V2.y, 20, 2, Color.BLACK);
        defaultFont.draw(batch, textLayout, posx + margin, tmp2V2.y + textLayout.height + margin);
    }

    /** Toggles in-scene animation preview; persisted to the editor config. */
    public void setInSceneSprites(boolean v) {
        inScene = v;
        Ctx.project.getEditorConfig().setProperty("view.inScene", Boolean.toString(inScene));
        if (!inScene)
            setSelectedFA(null);
    }

    public boolean getInSceneSprites() {
        return inScene;
    }

    /** Toggles actor animation in the editor; persisted to the editor config. */
    public void setAnimation(boolean v) {
        animation = v;
        Ctx.project.getEditorConfig().setProperty("view.animation", Boolean.toString(animation));
    }

    public boolean getAnimation() {
        return animation;
    }

    /** Points the side animation renderer at (a, fa); clears it on asset errors. */
    public void setAnimationRenderer(BaseActor a, AnimationDesc fa) {
        try {
            faRenderer.setActor(a);
            faRenderer.setAnimation(fa);
        } catch (Exception e) {
            Message.showMsg(getStage(), "Could not retrieve assets for sprite: " + fa.id, 4);
            EditorLogger.printStackTrace(e);
            faRenderer.setAnimation(null);
        }
    }

    public boolean getShowSpriteBounds() {
        return showSpriteBounds;
    }

    /** Toggles bounding-box overlay; persisted to the editor config. */
    public void setShowSpriteBounds(boolean v) {
        showSpriteBounds = v;
        Ctx.project.getEditorConfig().setProperty("view.showSpriteBounds", Boolean.toString(showSpriteBounds));
    }

    @Override
    public void layout() {
        // EditorLogger.debug("LAYOUT SIZE CHANGED - X: " + getX() + " Y: "
        // + getY() + " Width: " + getWidth() + " Height: " + getHeight());
        // EditorLogger.debug("Last Point coords - X: " + (getX() + getWidth())
        // + " Y: " + (getY() + getHeight()));
        localToScreenCoords(tmpV2.set(getX() + getWidth(), getY() + getHeight()));
        // EditorLogger.debug("Screen Last Point coords: " + tmpV2);

        faRenderer.setViewport(getWidth(), getHeight());
        bounds.set(getX(), getY(), getWidth(), getHeight());

        // SETS WORLD CAMERA
        if (scn != null) {
            // Fit the world into the widget, preserving the world's aspect ratio.
            float aspect = getWidth() / getHeight();
            float wWidth = Ctx.project.getWorld().getWidth();
            float wHeight = Ctx.project.getWorld().getHeight();
            float aspectWorld = wWidth / wHeight;
            if (aspectWorld > aspect) {
                wHeight = wWidth / aspect;
            } else {
                wWidth = wHeight * aspect;
            }
            zoomLevel = 100;
            camera.setToOrtho(false, wWidth, wHeight);
            camera.zoom = 1f;
            camera.position.set(Ctx.project.getWorld().getWidth() / 2, Ctx.project.getWorld().getHeight() / 2, 0);
            camera.update();
            // Step one zoom level out so the scene gets a margin around it.
            zoom(+1);
        }
    }

    /**
     * Steps the zoom one discrete level; amount > 0 zooms out, amount < 0 zooms in.
     * Clamped to the ends of the zoomLevels table.
     */
    public void zoom(int amount) {
        if (zoomLevel == zoomLevels[0] && amount < 0) {
            zoomLevel = zoomLevels[1];
        } else if (zoomLevel == zoomLevels[zoomLevels.length - 1] && amount > 0) {
            zoomLevel = zoomLevels[zoomLevels.length - 2];
        } else {
            for (int i = 1; i < zoomLevels.length - 1; i++) {
                if (zoomLevels[i] == zoomLevel) {
                    zoomLevel = amount > 0 ? zoomLevels[i - 1] : zoomLevels[i + 1];
                    break;
                }
            }
        }
        if (scn != null) {
            camera.zoom = 100f / zoomLevel;
            camera.update();
        }
    }

    /** Pans the world camera by -delta (drag direction is opposite to camera motion). */
    public void translate(Vector2 delta) {
        // EditorLogger.debug("TRANSLATING - X: " + delta.x + " Y: " + delta.y);
        if (scn != null) {
            camera.translate(-delta.x, -delta.y, 0);
            camera.update();
        }
    }

    /** Widget-local -> window (screen) coordinates, in place. */
    public void localToScreenCoords(Vector2 coords) {
        localToStageCoordinates(coords);
        getStage().stageToScreenCoordinates(coords);
    }

    /** Widget-local -> world coordinates (through screen space and the world camera), in place. */
    public void localToWorldCoords(Vector2 coords) {
        localToStageCoordinates(coords);
        getStage().stageToScreenCoordinates(coords);
        tmpV3.set(coords.x, coords.y, 0);
        camera.unproject(tmpV3, getX(), getY(), getWidth(), getHeight());
        coords.set(tmpV3.x, tmpV3.y);
    }

    /** Window (screen) -> world coordinates, in place, using this widget's viewport. */
    public void screenToWorldCoords(Vector2 coords) {
        tmpV2.set(0, 0);
        localToStageCoordinates(tmpV2);
        // getStage().stageToScreenCoordinates(tmpV2);
        tmpV3.set(coords.x, coords.y, 0);
        camera.unproject(tmpV3, tmpV2.x, tmpV2.y, getWidth(), getHeight());
        coords.set(tmpV3.x, tmpV3.y);
    }

    /** World -> widget-local coordinates, in place, using this widget's viewport. */
    public void worldToScreenCoords(Vector2 coords) {
        tmpV2.set(getX(), getY());
        localToStageCoordinates(tmpV2);
        tmpV3.set(coords.x, coords.y, 0);
        camera.project(tmpV3, tmpV2.x, tmpV2.y, getWidth(), getHeight());
        coords.set(tmpV3.x, tmpV3.y);
        stageToLocalCoordinates(coords);
    }

    /**
     * Returns true when (px, py) (world coords) hits the scale icon above the selected
     * sprite actor's bbox; false for non-sprite actors or hidden layers.
     *
     * NOTE(review): the hit rectangle is sized with scnMoveIcon, not scnScaleIcon —
     * confirm whether that is intentional.
     */
    public boolean inScaleIcon(float px, float py) {
        Polygon p = selectedActor.getBBox();
        if (selectedActor instanceof SpriteActor) {
            InteractiveActor ia = (InteractiveActor) selectedActor;
            if (!scn.getLayer(ia.getLayer()).isVisible())
                return false;
            Rectangle r = p.getBoundingRectangle();
            worldToScreenCoords(tmpV2Transform.set(r.x + r.width, r.y + r.height));
            float x = tmpV2Transform.x;
            float y = tmpV2Transform.y;
            Rectangle r2 = new Rectangle(x - scnMoveIcon.getRegionWidth() / 2, y, scnMoveIcon.getRegionWidth(),
                    scnMoveIcon.getRegionHeight());
            worldToScreenCoords(tmpV2Transform.set(px, py));
            return r2.contains(tmpV2Transform.x, tmpV2Transform.y);
        }
        return false;
    }

    public Scene getScene() {
        return scn;
    }

    public BaseActor getSelectedActor() {
        return selectedActor;
    }

    /**
     * Replaces the displayed scene: disposes the previous one and its assets, starts
     * async asset loading for the new one (draw() finishes it), and refits the camera.
     */
    public void setSelectedScene(Scene s) {
        if (scn != null) {
            scn.dispose();
            faRenderer.dispose();
            scn = null;
            EngineAssetManager.getInstance().clear();
        }
        loadingError = false;
        setSelectedActor(null);
        if (s != null) {
            scn = s;
            scn.loadAssets();
            loading = true;
        }
        // SETS WORLD CAMERA
        if (scn != null) {
            float aspect = getWidth() / getHeight();
            float wWidth = Ctx.project.getWorld().getWidth();
            float wHeight = Ctx.project.getWorld().getHeight();
            float aspectWorld = wWidth / wHeight;
            if (aspectWorld > aspect) {
                wHeight = wWidth / aspect;
            } else {
                wWidth = wHeight * aspect;
            }
            zoomLevel = 100;
            camera.setToOrtho(false, wWidth, wHeight);
            camera.zoom = 1f;
            camera.update();
            // translate(new Vector2((-getWidth() + wWidth ) / 2 *
            // camera.zoom,
            // (-getHeight() + wHeight) / 2 * camera.zoom));
            translate(new Vector2(0, (-getHeight() + wHeight) / 2));
        }
    }

    /** Selects an actor (only meaningful while a scene is set) and resets the animation view. */
    public void setSelectedActor(BaseActor actor) {
        BaseActor a = null;
        if (scn != null && actor != null) {
            a = actor;
        }
        selectedActor = a;
        // faRenderer.setActor(a);
        setAnimationRenderer(null, null);
    }

    /**
     * Selects the animation to preview for the selected actor. Falls back to the actor's
     * init animation when selFA is null/unknown; clears the preview on failure or for
     * actors without an AnimationRenderer.
     */
    public void setSelectedFA(String selFA) {
        if (selectedActor instanceof SpriteActor
                && ((SpriteActor) selectedActor).getRenderer() instanceof AnimationRenderer) {
            AnimationRenderer s = (AnimationRenderer) ((SpriteActor) selectedActor).getRenderer();
            // Unknown id (also in flipped form) -> fall back to the init animation.
            if (selFA == null || (s.getAnimations().get(selFA) == null
                    && s.getAnimations().get(AnimationRenderer.getFlipId(selFA)) == null)) {
                selFA = s.getInitAnimation();
            }
            if (selFA != null && (s.getAnimations().get(selFA) != null
                    || s.getAnimations().get(AnimationRenderer.getFlipId(selFA)) != null)) {
                setAnimationRenderer(selectedActor, s.getAnimations().get(selFA));
                String animInScene = selFA;
                if (!inScene && s.getInitAnimation() != null)
                    animInScene = s.getInitAnimation();
                try {
                    ((SpriteActor) selectedActor).startAnimation(animInScene, Tween.Type.REPEAT, Tween.INFINITY, null);
                } catch (Exception e) {
                    // Broken animation: clear the preview and drop it from the renderer's table.
                    setAnimationRenderer(selectedActor, null);
                    s.getAnimations().remove(selFA);
                }
            } else {
                setAnimationRenderer(selectedActor, null);
            }
        } else {
            setAnimationRenderer(selectedActor, null);
        }
    }

    /** Releases the current scene and the animation renderer. */
    public void dispose() {
        if (scn != null) {
            scn.dispose();
            scn = null;
        }
        faRenderer.dispose();
    }
}
| |
package web;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import src.Crawler;
import src.Util;
import api.GTranslator;
public class StatisticReader extends HTMLReader {
    // NOTE(review): logger is registered under Crawler's name, not StatisticReader's — confirm intended.
    private final static Logger logger = Logger.getLogger(Crawler.class .getName());

    // DOM ids/classes of the community landing page elements holding the counters.
    private final static String IDEAS_STATS = "ideas-stats";
    private final static String OTHER_STATS = "other-stats";
    private final static String IDEAS_IN_REVIEW = "tab-review";
    private final static String IDEAS_IN_PROGRESS = "tab-progress";
    private final static String IDEAS_COMPLETED = "tab-complete";

    // Social-network widgets: Facebook iframe element and Twitter count endpoint.
    private final static String FRAME_TAG = "iframe";
    private final static String FACEBOOK_STATS = "u_0_1";
    private final static String TWITTER_URL_P = "https://cdn.api.twitter.com/1/urls/count.json?url=";
    private final static String TWITTER_URL_S = "&callback=twttr.receiveCount";

    // Misc page sections.
    private final static String LOGO = "logo";
    private final static String EXPLANATION_TEXT = "client-txt";
    private final static String TABS = "listing-nav";

    // Idea-page selectors: votes, comments, description, tags, attachments, etc.
    private final static String IDEA_VOTES = "vote-number";
    private final static String IDEA_COMMENTS_DATE = "comment-date comment-meta";
    private final static String IDEA_COMMENTS_ID = "comment-list";
    private final static String IDEA_COMMENTS_DESCRIPTION = "comment-content";
    private final static String IDEA_COMMENT_AUTHOR_NAME = "comment-author-name";
    private final static String IDEA_DESCRIPTION_CLASS = "entry-content";
    private final static String IDEA_HREF_TAGS = "/a/ideas/tag/tags/";
    private final static String HREF_ATTR = "href";
    private final static String IDEA_SIMILAR_ID = "similar-idea-list";
    private final static String IDEA_ATTACHMENTS_ID = "attachments-content";
    private final static String MODERATOR_LIST_ID = "global-moderator-list";
    private final static String IDEA_CREATION_TIME = "published";

    private GTranslator translator = null;      // Google-translate helper for non-English pages
    DateFormat dateFormat = null;               // parses idea timestamps ("yyyy-MM-dd HH:mm:ss")
    /**
     * Builds a reader with a browser-like user agent, a translator instance and the
     * date format used for idea creation timestamps.
     */
    public StatisticReader() {
        super();
        prepareUserAgent(); // inherited from HTMLReader; configures the HTTP user agent
        translator = new GTranslator();
        // NOTE(review): SimpleDateFormat is not thread-safe — fine while each reader
        // instance is used from a single thread; confirm callers do so.
        dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    }
public HashMap<String,Object> getCommunityStatistic(String url)
{
HashMap<String,Object> statistics = new HashMap<String,Object>();
statistics.put("ideas", null);
statistics.put("ideas_in_review", null);
statistics.put("ideas_in_progress", null);
statistics.put("ideas_completed", null);
statistics.put("comments", null);
statistics.put("votes", null);
statistics.put("members", null);
statistics.put("facebook", null);
statistics.put("twitter", null);
statistics.put("logo", null);
statistics.put("explanation_text", null);
statistics.put("tabs", null);
statistics.put("moderators", null);
String content;
String textElement;
try {
//url = "http://midata.ideascale.com";
content = getUrlContent(Util.toURI(url));
Document doc = Jsoup.parse(content);
Element ideasStats = doc.getElementById(IDEAS_STATS);
if (ideasStats != null) {
textElement = replaceThounsandSymbol(ideasStats.child(0).text());
if (isNumeric(textElement))
statistics.put("ideas", textElement);
else
throw new Exception("The ideas counter is not numeric. Community: " + url);
}
Element otherStats = doc.getElementById(OTHER_STATS);
if (otherStats != null) {
Elements childrenStats = otherStats.children();
textElement = replaceThounsandSymbol(childrenStats.get(0).
getElementsByClass("number").
get(0).text());
textElement = textElement.replaceAll("[^0-9]+", " ").trim();
if (isNumeric(textElement))
statistics.put("comments", textElement);
else
throw new Exception("The comments counter is not numeric. Community: " + url);
textElement = replaceThounsandSymbol(childrenStats.get(1).
getElementsByClass("number").
get(0).text());
textElement = textElement.replaceAll("[^0-9]+", " ").trim();
if (isNumeric(textElement))
statistics.put("votes",textElement);
else
throw new Exception("The votes counter is not numeric. Community: " + url);
textElement = replaceThounsandSymbol(childrenStats.get(2).
getElementsByClass("number").
get(0).text());
textElement = textElement.replaceAll("[^0-9]+", " ").trim();
if (isNumeric(textElement))
statistics.put("members", textElement);
else
throw new Exception("The members counter is not numeric. Community: " + url);
}
Element ideasInReview = doc.getElementById(IDEAS_IN_REVIEW);
if (ideasInReview != null) {
textElement = ideasInReview.child(0).text();
textElement = textElement.replaceAll("[^0-9]+", " ");
textElement = textElement.trim();
if (isNumeric(textElement))
statistics.put("ideas_in_review",textElement);
else
throw new Exception("The ideas in review counter is not numeric. Community: " + url);
}
Element ideasInProgress = doc.getElementById(IDEAS_IN_PROGRESS);
if (ideasInProgress != null) {
textElement = ideasInProgress.child(0).text();
textElement = textElement.replaceAll("[^0-9]+", " ");
textElement = textElement.trim();
if (isNumeric(textElement))
statistics.put("ideas_in_progress",textElement);
else
throw new Exception("The ideas in progress counter is not numeric. Community: " + url);
}
Element ideasCompleted = doc.getElementById(IDEAS_COMPLETED);
if (ideasCompleted != null) {
textElement = ideasCompleted.child(0).text();
textElement = textElement.replaceAll("[^0-9]+", " ");
textElement = textElement.trim();
if (isNumeric(textElement))
statistics.put("ideas_completed",textElement);
else
throw new Exception("The ideas completed counter is not numeric. Community: " + url);
}
/*Element logo = doc.getElementById(LOGO);
if (logo != null)
statistics.put("logo", "yes");*/
Element explanation = doc.getElementById(EXPLANATION_TEXT);
if (explanation != null) {
textElement = explanation.text();
if (!textElement.isEmpty())
statistics.put("explanation_text", "yes");
}
Element modList = doc.getElementById(MODERATOR_LIST_ID);
if (modList != null) {
statistics.put("moderators", modList.children().size());
}
ArrayList<HashMap<String,String>> tabs = getTabsURL(doc);
if (tabs.isEmpty()) {
statistics.put("tabs", null);
statistics.put("status", "closed");
}
else {
statistics.put("tabs", tabs);
statistics.put("status", "active");
}
HashMap<String,Object> auxStats = getSNCounters(doc,url);
statistics.put("facebook",auxStats.get("facebook"));
statistics.put("twitter",auxStats.get("twitter"));
} catch (Exception e) {
e.printStackTrace();
logger.log(Level.SEVERE,e.getMessage(),e);
}
return statistics;
}
/**
 * Scrapes a single idea page and collects its statistics: description, tags,
 * creation date, social-network share counters, comment and vote meta-info,
 * similar-idea count and attachment count.
 *
 * @param communityURL  base URL of the IdeaScale community (used to build the idea URL)
 * @param ideaURL       idea path relative to the community URL
 * @param communityLang language of the community, used to translate relative dates
 * @return map of statistic name to value; counters default to null/0 when absent
 * @throws Exception on fetch/parse failures or an unrecognized vote value
 */
public HashMap<String,Object> getIdeaStatistics(String communityURL,
String ideaURL,
String communityLang)
throws Exception {
    HashMap<String,Object> statistics = new HashMap<String,Object>();
    String ideaURLEncoded = URLEncoder.encode(ideaURL, "utf-8");
    String fullURL = communityURL+ideaURLEncoded;
    //String fullURL = "http://pdfvg.ideascale.com/a/dtd/Banda-larga-e-Wi-Fi-Razionalizzazione-e-potenziamento-in-FVG/368965-14912";
    // Pre-seed every key so callers always find them, even when scraping bails out early.
    statistics.put("description", null);
    statistics.put("tags", null);
    statistics.put("facebook", null);
    statistics.put("twitter", null);
    statistics.put("comments", null);
    statistics.put("score", null);
    statistics.put("attachments", null);
    // NOTE(review): the page itself is fetched from the *unencoded* URL while the
    // encoded fullURL is only used for the social-network counters below — confirm
    // this asymmetry is intended.
    String content = getUrlContent(Util.toURI(communityURL+ideaURL));
    //String content = getUrlContent(Util.toURI(fullURL));
    Document doc = Jsoup.parse(content);
    //Description
    Elements desc = doc.getElementsByClass(IDEA_DESCRIPTION_CLASS);
    String ideaDescription = "";
    for (int i = 0; i < desc.size(); i++)
        ideaDescription += desc.get(i).text();
    statistics.put("description", ideaDescription);
    //If the description is null the idea is inaccessible
    if (!ideaDescription.isEmpty()) {
        //Tags: join the text of every tag link with ", "
        Elements tags = doc.getElementsByAttributeValueMatching(HREF_ATTR, IDEA_HREF_TAGS);
        if (!tags.isEmpty()) {
            String ideaTags = "";
            int numTags = tags.size();
            for (int i = 0; i < numTags; i++) {
                if (i != (numTags - 1))
                    ideaTags += tags.get(i).text() + ", ";
                else
                    ideaTags += tags.get(i).text();
            }
            statistics.put("tags", ideaTags);
        }
        else {
            statistics.put("tags", null);
        }
        //Date: the title attribute holds an ISO-like "date T time-zone" stamp
        Elements ideaDTElems = doc.getElementsByClass(IDEA_CREATION_TIME);
        Date ideaDateTime = null;
        if (!ideaDTElems.isEmpty()) {
            String[] ideaDT = ideaDTElems.first().attr("title").split("T");
            ideaDateTime = dateFormat.parse(ideaDT[0]+" "+ideaDT[1].split("-")[0]);
            HashMap<String,String> dates = getDate(ideaDTElems.get(0).text(), communityLang, ideaDateTime);
            statistics.put("idea_platform_datetime", dates.get("platform"));
        }
        //Social Networks
        HashMap<String,Object> auxStats = getIdeaSNCounters(doc,fullURL);
        statistics.put("facebook", auxStats.get("facebook"));
        statistics.put("twitter",auxStats.get("twitter"));
        //Get the comment counter and comments meta-info (recursive over reply threads)
        Element comments = doc.getElementById(IDEA_COMMENTS_ID);
        if (comments != null) {
            statistics.put("comments", comments.children().size());
            ArrayList<HashMap<String,String>> commentsMeta = new ArrayList<HashMap<String,String>>();
            commentsMeta = getComments(comments,commentsMeta,"-1", communityLang, ideaDateTime);
            statistics.put("comments-meta", commentsMeta);
        }
        else {
            statistics.put("comments", 0);
        }
        //Get score
        // NOTE(review): parseInt throws NumberFormatException if the vote text is not
        // a plain integer (e.g. "1.2K"); confirm the page never abbreviates scores.
        Element scoreElem = doc.getElementsByClass("vote-number").first();
        if (scoreElem != null)
            statistics.put("score", Integer.parseInt(scoreElem.text()));
        else
            statistics.put("score", 0);
        //Get votes meta-info: one entry per vote with author, value (+1/-1) and dates
        Element voteElem = doc.getElementById("vote-activity-list");
        if (voteElem != null) {
            ArrayList<HashMap<String,String>> votesMeta = new ArrayList<HashMap<String,String>>();
            for (Element vote : voteElem.children()) {
                HashMap<String,String> voteMeta = new HashMap<String,String>();
                Elements voter = vote.getElementsByClass("voter");
                // A second child means the voter is a linked (registered) profile.
                if (voter.first().children().size() > 1) {
                    Element eAuthor = voter.first().child(1);
                    voteMeta.put("author-name", eAuthor.text());
                    // Author id is the numeric prefix of the last path segment of the href.
                    String authorId = eAuthor.attr(HREF_ATTR);
                    authorId = authorId.substring(authorId.lastIndexOf("/")+1,authorId.length());
                    authorId = authorId.split("-")[0];
                    if (isNumeric(authorId))
                        voteMeta.put("author-id", authorId);
                    else
                        voteMeta.put("author-id", "-1");
                }
                else {
                    voteMeta.put("author-name", "Unsuscribed User");
                    voteMeta.put("author-id", "-1");
                }
                Element type = vote.getElementsByClass("vote").first().child(0);
                if (type.getElementsByTag("strong").attr("class").equals("up"))
                    voteMeta.put("value", "1");
                else if (type.getElementsByTag("strong").attr("class").equals("down"))
                    voteMeta.put("value", "-1");
                else
                    throw new Exception("Couldn't understand vote value " + type.getElementsByTag("strong").text());
                Element date = vote.getElementsByClass("vote").first().child(1);
                HashMap<String,String> dates = getDate(date.text(), communityLang, ideaDateTime);
                voteMeta.put("date", dates.get("approximate"));
                voteMeta.put("date_platform", dates.get("platform"));
                votesMeta.add(voteMeta);
            }
            statistics.put("votes-meta", votesMeta);
        }
        //Get similar ideas
        Element similarIdeas = doc.getElementById(IDEA_SIMILAR_ID);
        if (similarIdeas != null)
            statistics.put("similar", similarIdeas.children().size());
        else
            statistics.put("similar", 0);
        //Get attachments
        Element attachment = doc.getElementById(IDEA_ATTACHMENTS_ID);
        if (attachment != null) {
            Element attachments_list = attachment.child(0);
            int numAttachments = attachments_list.children().size();
            statistics.put("attachments", numAttachments);
        }
        else {
            statistics.put("attachments", 0);
        }
    }
    return statistics;
}
/**
 * Recursively extracts meta-information for every comment under
 * {@code rootComments}: id, author name/id, (approximate and platform) dates,
 * description text, parent comment id and author type.
 * <p>
 * Replies found in "child-comments" containers are recursed into with the
 * current comment's id as their parent. Results are appended to
 * {@code commentsMeta}, which is also returned.
 *
 * @param rootComments container element whose children are comment elements
 * @param commentsMeta accumulator list, mutated in place
 * @param parent       id of the parent comment, "-1" for top-level comments
 * @param language     community language, used to translate relative dates
 * @param ideaDateTime creation time of the idea (lower bound for random dates)
 * @return the accumulator list with this subtree's comments appended
 */
private ArrayList<HashMap<String,String>> getComments(Element rootComments,
ArrayList<HashMap<String,String>> commentsMeta,
String parent,
String language,
Date ideaDateTime)
throws UnsupportedEncodingException
{
    for (Element comment : rootComments.children()) {
        HashMap<String,String> commentMeta = new HashMap<String,String>();
        // Comment elements carry ids like "comment-12345"; keep the numeric part.
        String commentId = comment.attr("id").split("-")[1];
        Elements childComments = comment.getElementsByClass("child-comments");
        for (int i = 0; i < childComments.size(); i++)
            getComments(childComments.get(i), commentsMeta, commentId, language, ideaDateTime);
        commentMeta.put("id", commentId);
        Elements commenter = comment.getElementsByClass(IDEA_COMMENT_AUTHOR_NAME);
        if (commenter.first().children().size() > 0) {
            Element eAuthor = commenter.first().child(0);
            commentMeta.put("author-name", eAuthor.text());
            // Author id is the numeric prefix of the last path segment of the href.
            String authorId = eAuthor.attr(HREF_ATTR);
            authorId = authorId.substring(authorId.lastIndexOf("/")+1, authorId.length());
            authorId = authorId.split("-")[0];
            if (isNumeric(authorId))
                commentMeta.put("author-id", authorId);
            else
                commentMeta.put("author-id", "-1");
        }
        else {
            commentMeta.put("author-name", "Unsuscribed User");
            commentMeta.put("author-id", "-1");
        }
        // Guard: a missing date element previously caused a NullPointerException.
        Element date = comment.getElementsByAttributeValueMatching("class", IDEA_COMMENTS_DATE).first();
        if (date != null) {
            HashMap<String,String> dates = getDate(date.text(), language, ideaDateTime);
            commentMeta.put("date", dates.get("approximate"));
            commentMeta.put("date_platform", dates.get("platform"));
        }
        Elements commentDesc = comment.getElementsByClass(IDEA_COMMENTS_DESCRIPTION);
        String commentContent = "";
        for (int i = 0; i < commentDesc.size(); i++)
            commentContent += commentDesc.get(i).text();
        commentMeta.put("description", commentContent);
        commentMeta.put("parent", parent);
        // Author type defaults to "crowd"; refine it from the vcard classes if present.
        // Guard: a missing vcard element previously caused a NullPointerException.
        commentMeta.put("author-type", "crowd");
        Element commenterVCard = comment.getElementsByAttributeValueMatching("class", "vcard").first();
        if (commenterVCard != null) {
            String vCard = commenterVCard.attr("class");
            if (vCard.contains("idea-submitter"))
                commentMeta.put("author-type", "submitter");
            else if (vCard.contains("moderator"))
                commentMeta.put("author-type", "moderator");
        }
        commentsMeta.add(commentMeta);
    }
    return commentsMeta;
}
/**
 * Extracts the community navigation tabs from the page.
 * <p>
 * Each returned map holds the tab's "url" and its "ideas" counter (digits
 * stripped out of the link text, possibly empty). Tabs without a counter are
 * always kept; tabs whose counter is exactly 0 are dropped.
 *
 * @param doc parsed community page
 * @return list of tab descriptors, empty when the tab bar is absent
 */
public ArrayList<HashMap<String,String>> getTabsURL(Document doc) throws Exception {
    ArrayList<HashMap<String,String>> tabs = new ArrayList<HashMap<String,String>>();
    Element navTabs = doc.getElementById(TABS);
    if (navTabs == null) {
        return tabs;
    }
    for (Element li : navTabs.children()) {
        Element aLink = li.child(0);
        String numIdeas = aLink.text().replaceAll("[^0-9]+", " ").trim();
        // Keep counter-less tabs and tabs with a non-zero idea count.
        if (numIdeas.isEmpty() || Integer.parseInt(numIdeas) != 0) {
            HashMap<String,String> tab = new HashMap<String,String>();
            tab.put("url", aLink.attr("href"));
            tab.put("ideas", numIdeas);
            tabs.add(tab);
        }
    }
    return tabs;
}
/**
 * Expands the "K" thousands suffix in a counter string, e.g. "3K" -> "3000".
 * <p>
 * NOTE(review): the method name keeps its original typo ("Thounsand") because
 * it is private and may be called elsewhere in this file. Also beware that a
 * decimal counter such as "1.2K" becomes "1.2000", not "1200" — confirm the
 * callers never receive decimal-abbreviated values.
 */
private String replaceThounsandSymbol(String str) {
return str.replace("K","000");
}
/**
 * Fetches the Facebook share counter and the Twitter share counter for a
 * community page.
 * <p>
 * Both counters stay null when the page contains no social-network iframe;
 * otherwise each is a numeric string, defaulting to "0" when the counter
 * cannot be read.
 *
 * @param doc parsed community page
 * @param url community URL, used to query the Twitter counter endpoint
 * @return map with "facebook" and "twitter" entries
 * @throws Exception on fetch failures
 */
public HashMap<String,Object> getSNCounters(Document doc, String url)
throws Exception
{
    HashMap<String,Object> snCounters = new HashMap<String,Object>();
    snCounters.put("facebook", null);
    snCounters.put("twitter", null);
    Elements frameTag = doc.getElementsByTag(FRAME_TAG);
    if (!frameTag.isEmpty()) {
        Element facebookTag = frameTag.first(); //Should be the Facebook one. WARNING.
        String urlSN = facebookTag.attr("src");
        String content = getUrlContent(Util.toURI("https:" + urlSN));
        Document docSN = Jsoup.parse(content);
        Element facebookStats = docSN.getElementById(FACEBOOK_STATS);
        if (facebookStats != null) {
            String shared = facebookStats.text().replaceAll("[^0-9]+", " ").trim();
            // isNumeric("") is false, so an empty counter also falls back to "0".
            snCounters.put("facebook", isNumeric(shared) ? shared : "0");
        }
        //Get Twitter counter
        String twURL = TWITTER_URL_P + URLEncoder.encode(url, "utf-8") +
            TWITTER_URL_S;
        content = getUrlContent(twURL);
        docSN = Jsoup.parse(content);
        String textElement = docSN.getElementsByTag("body").text();
        // Guard: indexOf() returns -1 on a malformed response, which previously made
        // substring() throw StringIndexOutOfBoundsException.
        int colon = textElement.indexOf(":");
        int comma = textElement.indexOf(",");
        String twCounter = (colon != -1 && comma > colon)
            ? textElement.substring(colon + 1, comma)
            : "";
        snCounters.put("twitter", isNumeric(twCounter) ? twCounter : "0");
    }
    return snCounters;
}
/**
 * Fetches the Facebook like counter and the Twitter share counter for a
 * single idea page.
 * <p>
 * "facebook" stays null when the page has no like box; "twitter" is always
 * queried. Unreadable counters default to "0".
 *
 * @param doc parsed idea page
 * @param url full (encoded) idea URL, used to query the Twitter endpoint
 * @return map with "facebook" and "twitter" entries
 * @throws Exception on fetch failures
 */
public HashMap<String,Object> getIdeaSNCounters(Document doc, String url)
throws Exception
{
    HashMap<String,Object> snCounters = new HashMap<String,Object>();
    snCounters.put("facebook", null);
    snCounters.put("twitter", null);
    Elements facebook = doc.getElementsByAttributeValue("class", "like");
    // Guard: also require an iframe inside the like box; facebookTag.first() was
    // previously dereferenced without a null check.
    if (!facebook.isEmpty() && !facebook.first().getElementsByTag(FRAME_TAG).isEmpty()) {
        Elements facebookTag = facebook.first().getElementsByTag(FRAME_TAG);
        String urlSN = URLDecoder.decode(facebookTag.first().attr("src"), "UTF-8");
        String content = getUrlContent(Util.toURI("https:" + urlSN));
        Document docSN = Jsoup.parse(content);
        Elements facebookStats = docSN.getElementsByClass("pluginCountTextDisconnected");
        if (!facebookStats.isEmpty()) {
            String shared = facebookStats.first().text().replaceAll("[^0-9]+", " ").trim();
            // isNumeric("") is false, so an empty counter also falls back to "0".
            snCounters.put("facebook", isNumeric(shared) ? shared : "0");
        }
        else {
            Util.printMessage("Couldn't get the facebook counter of the idea: " + url, "severe", logger);
        }
    }
    //Get Twitter counter (fetched even when no Facebook like box exists, as before)
    String twURL = TWITTER_URL_P + url + TWITTER_URL_S;
    String content = getUrlContent(twURL);
    Document docSN = Jsoup.parse(content);
    String textElement = docSN.getElementsByTag("body").text();
    // Guard: indexOf() returns -1 on a malformed response, which previously made
    // substring() throw StringIndexOutOfBoundsException.
    int colon = textElement.indexOf(":");
    int comma = textElement.indexOf(",");
    String twCounter = (colon != -1 && comma > colon)
        ? textElement.substring(colon + 1, comma)
        : "";
    snCounters.put("twitter", isNumeric(twCounter) ? twCounter : "0");
    return snCounters;
}
/**
 * Returns true when {@code num} parses as a base-10 {@code int}
 * (so empty strings, decimals and values beyond the int range are rejected).
 */
private boolean isNumeric(String num)
{
    try {
        Integer.parseInt(num);
        return true;
    } catch (NumberFormatException nfe) {
        return false;
    }
}
/**
 * Returns true when the relative date string already contains an English
 * time unit, i.e. no translation is needed before parsing it.
 * Checking the singular form also matches the plural ("hour" matches "hours").
 */
private boolean englishDate(String date) {
    return date.contains("hour")
        || date.contains("day")
        || date.contains("month")
        || date.contains("year");
}
/**
 * Converts a relative platform date ("3 days ago", possibly localized) into
 * two representations: "platform", the (translated) relative text, and
 * "approximate", a random concrete timestamp between {@code ideaDateTime}
 * and now minus the relative offset.
 *
 * @param vagueDate         relative date text scraped from the page
 * @param communityLanguage source language for translation when the text is
 *                          not already English
 * @param ideaDateTime      lower bound for the randomized approximate date
 * @return map with "approximate" and "platform" entries
 */
private HashMap<String,String> getDate(String vagueDate, String communityLanguage, Date ideaDateTime) {
    HashMap<String,String> date = new HashMap<String, String>();
    Calendar cal = Calendar.getInstance(); // already holds "now"; the old setTime(getTime()) was a no-op
    //Find the number
    // Guard: bounds-checked scan — a digit-free string previously threw
    // StringIndexOutOfBoundsException; now parseInt("") reports it as a
    // NumberFormatException instead.
    int i = 0;
    while (i < vagueDate.length() && !Character.isDigit(vagueDate.charAt(i))) i++;
    //Remove all non-numeric characters
    vagueDate = vagueDate.substring(i);
    int num = Integer.parseInt(vagueDate.replaceAll("[^0-9]+", " ").trim());
    String translatedText = "";
    if (!englishDate(vagueDate))
        translatedText = translator.translateText(vagueDate, communityLanguage, "en");
    else
        translatedText = vagueDate;
    // Singular checks also match plurals ("hour" matches "hours").
    if (translatedText.indexOf("hour") != -1) {
        cal.add(Calendar.HOUR_OF_DAY, -num);
    } else if (translatedText.indexOf("day") != -1) {
        cal.add(Calendar.DAY_OF_YEAR, -num);
    } else if (translatedText.indexOf("month") != -1) {
        cal.add(Calendar.MONTH, -num);
    } else if (translatedText.indexOf("year") != -1) {
        cal.add(Calendar.YEAR, -num);
    }
    date.put("approximate", getRandDate(cal.getTime(), ideaDateTime, cal));
    date.put("platform", translatedText);
    return date;
}
/**
 * Formats a uniformly random instant between {@code ideaDate} (lower bound)
 * and {@code maxDate} (upper bound) using this object's date format.
 * The supplied {@code cal} is reused as scratch space and is mutated.
 */
private String getRandDate(Date maxDate, Date ideaDate, Calendar cal) {
    cal.setTime(ideaDate);
    long lower = cal.getTimeInMillis();
    cal.setTime(maxDate);
    long upper = cal.getTimeInMillis();
    long pick = (long) (lower + Math.random() * (upper - lower));
    cal.setTimeInMillis(pick);
    return dateFormat.format(cal.getTime());
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/common/criteria.proto
package com.google.ads.googleads.v8.common;
/**
 * <pre>
 * Brand of the product.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v8.common.ProductBrandInfo}
 *
 * <p>Generated by protoc from google/ads/googleads/v8/common/criteria.proto;
 * do not edit by hand — changes will be lost on regeneration.
 */
public final class ProductBrandInfo extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v8.common.ProductBrandInfo)
    ProductBrandInfoOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ProductBrandInfo.newBuilder() to construct.
  private ProductBrandInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ProductBrandInfo() {
    value_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ProductBrandInfo();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor; invoked only through PARSER below.
  private ProductBrandInfo(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 18: {
            java.lang.String s = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            value_ = s;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_ProductBrandInfo_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_ProductBrandInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v8.common.ProductBrandInfo.class, com.google.ads.googleads.v8.common.ProductBrandInfo.Builder.class);
  }

  // Bit 0 tracks presence of the optional 'value' field.
  private int bitField0_;
  public static final int VALUE_FIELD_NUMBER = 2;
  private volatile java.lang.Object value_;
  /**
   * <pre>
   * String value of the product brand.
   * </pre>
   *
   * <code>optional string value = 2;</code>
   * @return Whether the value field is set.
   */
  @java.lang.Override
  public boolean hasValue() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * String value of the product brand.
   * </pre>
   *
   * <code>optional string value = 2;</code>
   * @return The value.
   */
  @java.lang.Override
  public java.lang.String getValue() {
    java.lang.Object ref = value_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode a ByteString payload and cache the String.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      value_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * String value of the product brand.
   * </pre>
   *
   * <code>optional string value = 2;</code>
   * @return The bytes for value.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getValueBytes() {
    java.lang.Object ref = value_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      value_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Cached isInitialized result: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, value_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, value_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v8.common.ProductBrandInfo)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v8.common.ProductBrandInfo other = (com.google.ads.googleads.v8.common.ProductBrandInfo) obj;

    if (hasValue() != other.hasValue()) return false;
    if (hasValue()) {
      if (!getValue()
          .equals(other.getValue())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasValue()) {
      hash = (37 * hash) + VALUE_FIELD_NUMBER;
      hash = (53 * hash) + getValue().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.common.ProductBrandInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v8.common.ProductBrandInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Brand of the product.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v8.common.ProductBrandInfo}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.common.ProductBrandInfo)
      com.google.ads.googleads.v8.common.ProductBrandInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_ProductBrandInfo_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_ProductBrandInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v8.common.ProductBrandInfo.class, com.google.ads.googleads.v8.common.ProductBrandInfo.Builder.class);
    }

    // Construct using com.google.ads.googleads.v8.common.ProductBrandInfo.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      value_ = "";
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v8.common.CriteriaProto.internal_static_google_ads_googleads_v8_common_ProductBrandInfo_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v8.common.ProductBrandInfo getDefaultInstanceForType() {
      return com.google.ads.googleads.v8.common.ProductBrandInfo.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v8.common.ProductBrandInfo build() {
      com.google.ads.googleads.v8.common.ProductBrandInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v8.common.ProductBrandInfo buildPartial() {
      com.google.ads.googleads.v8.common.ProductBrandInfo result = new com.google.ads.googleads.v8.common.ProductBrandInfo(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        to_bitField0_ |= 0x00000001;
      }
      result.value_ = value_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v8.common.ProductBrandInfo) {
        return mergeFrom((com.google.ads.googleads.v8.common.ProductBrandInfo)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v8.common.ProductBrandInfo other) {
      if (other == com.google.ads.googleads.v8.common.ProductBrandInfo.getDefaultInstance()) return this;
      if (other.hasValue()) {
        bitField0_ |= 0x00000001;
        value_ = other.value_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v8.common.ProductBrandInfo parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v8.common.ProductBrandInfo) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Bit 0 tracks presence of the optional 'value' field in this builder.
    private int bitField0_;

    private java.lang.Object value_ = "";
    /**
     * <pre>
     * String value of the product brand.
     * </pre>
     *
     * <code>optional string value = 2;</code>
     * @return Whether the value field is set.
     */
    public boolean hasValue() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * String value of the product brand.
     * </pre>
     *
     * <code>optional string value = 2;</code>
     * @return The value.
     */
    public java.lang.String getValue() {
      java.lang.Object ref = value_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        value_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * String value of the product brand.
     * </pre>
     *
     * <code>optional string value = 2;</code>
     * @return The bytes for value.
     */
    public com.google.protobuf.ByteString
        getValueBytes() {
      java.lang.Object ref = value_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        value_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * String value of the product brand.
     * </pre>
     *
     * <code>optional string value = 2;</code>
     * @param value The value to set.
     * @return This builder for chaining.
     */
    public Builder setValue(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
      value_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * String value of the product brand.
     * </pre>
     *
     * <code>optional string value = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearValue() {
      bitField0_ = (bitField0_ & ~0x00000001);
      value_ = getDefaultInstance().getValue();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * String value of the product brand.
     * </pre>
     *
     * <code>optional string value = 2;</code>
     * @param value The bytes for value to set.
     * @return This builder for chaining.
     */
    public Builder setValueBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      bitField0_ |= 0x00000001;
      value_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.common.ProductBrandInfo)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v8.common.ProductBrandInfo)
  private static final com.google.ads.googleads.v8.common.ProductBrandInfo DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v8.common.ProductBrandInfo();
  }

  public static com.google.ads.googleads.v8.common.ProductBrandInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Singleton parser delegating to the wire-format parsing constructor above.
  private static final com.google.protobuf.Parser<ProductBrandInfo>
      PARSER = new com.google.protobuf.AbstractParser<ProductBrandInfo>() {
    @java.lang.Override
    public ProductBrandInfo parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new ProductBrandInfo(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<ProductBrandInfo> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ProductBrandInfo> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v8.common.ProductBrandInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
| |
/*
* @(#)Apply.java
*
* Copyright 2003-2005 Sun Microsystems, Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistribution of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING
* ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
* OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN")
* AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE
* AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
* DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST
* REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL,
* INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY
* OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that this software is not designed or intended for use in
* the design, construction, operation or maintenance of any nuclear facility.
*/
package com.connexta.arbitro.cond;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import com.connexta.arbitro.ParsingException;
import com.connexta.arbitro.XACMLConstants;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import com.connexta.arbitro.PolicyMetaData;
import com.connexta.arbitro.ctx.EvaluationCtx;
/**
 * Represents the XACML ApplyType and ConditionType XML types.
 * <p>
 * Note well: as of 2.0 there is no longer a separate higher-order bag function;
 * if needed it is supplied as one of the <code>Expression</code>s in the parameter
 * list, so evaluation no longer pre-evaluates parameters when a bag function is
 * used — that is left to the implementor of the higher-order function.
 * <p>
 * Also, as of 2.0 an <code>Apply</code> no longer represents a Condition; the
 * separate <code>Condition</code> class covers both 1.x and 2.0 style Conditions.
 *
 * @since 1.0
 * @author Seth Proctor
 */
public class Apply implements Evaluatable {

    /** The function invoked when this apply is evaluated. */
    private Function function;

    /** The function's arguments: an unmodifiable list of Expressions. */
    private List xprs;

    /**
     * Constructs an <code>Apply</code> instance.
     *
     * @param function the <code>Function</code> to use in evaluating the elements in the apply
     * @param xprs the contents of the apply, each of which is an <code>Expression</code>,
     *            used as the parameters to the function
     *
     * @throws IllegalArgumentException if the input expressions don't match the signature of the
     *             function
     */
    public Apply(Function function, List xprs) throws IllegalArgumentException {
        // fail fast if the argument list does not satisfy the function's signature
        function.checkInputs(xprs);

        this.function = function;
        // snapshot the caller's list and expose it read-only
        this.xprs = Collections.unmodifiableList(new ArrayList(xprs));
    }

    /**
     * Constructs an <code>Apply</code> instance.
     *
     * @deprecated As of 2.0 <code>Apply</code> is no longer used for Conditions, so the
     *             <code>isCondition</code> parameter is no longer needed. Use the two-parameter
     *             constructor instead; this one will be removed in a future release.
     *
     * @param function the <code>Function</code> to use in evaluating the elements in the apply
     * @param xprs the contents of the apply, each of which is an <code>Expression</code>,
     *            used as the parameters to the function
     * @param isCondition as of 2.0, this must always be false
     *
     * @throws IllegalArgumentException if the input expressions don't match the signature of the
     *             function or if <code>isCondition</code> is true
     */
    public Apply(Function function, List xprs, boolean isCondition) throws IllegalArgumentException {
        // guard against anyone using this constructor to create a Condition
        if (isCondition) {
            throw new IllegalArgumentException("As of version 2.0 an Apply may not represent a Condition");
        }

        // fail fast if the argument list does not satisfy the function's signature
        function.checkInputs(xprs);

        this.function = function;
        // snapshot the caller's list and expose it read-only
        this.xprs = Collections.unmodifiableList(new ArrayList(xprs));
    }

    /**
     * Returns an <code>Apply</code> based on the given DOM root node, specifically the special
     * kind of <code>Apply</code> that is an XML ConditionType (the root of the condition logic in
     * a RuleType). A ConditionType is an ApplyType whose FunctionId must return a boolean.
     * <p>
     * As of 2.0 there is a separate <code>Condition</code> class supporting both XACML 1.x and
     * 2.0 Conditions; a ConditionType is no longer treated as a special ApplyType. You may still
     * use this method to get a 1.x style ConditionType, but must convert it into a
     * <code>Condition</code> for evaluation. The preferred way to create a Condition is the
     * <code>getInstance</code> method on <code>Condition</code>.
     *
     * @param root the DOM root of a ConditionType XML type
     * @param xpathVersion the XPath version to use in any selectors or XPath functions, or null if
     *            this is unspecified (ie, not supplied in the defaults section of the policy)
     * @param manager <code>VariableManager</code> used to connect references and definitions while
     *            parsing
     *
     * @throws ParsingException if this is not a valid ConditionType
     *
     * @return an instance of Apply based on the given DOM root node
     */
    public static Apply getConditionInstance(Node root, String xpathVersion, VariableManager manager)
            throws ParsingException {
        PolicyMetaData metaData =
                new PolicyMetaData(XACMLConstants.XACML_1_0_IDENTIFIER, xpathVersion);
        return getInstance(root, FunctionFactory.getConditionInstance(), metaData, manager);
    }

    /**
     * Returns an <code>Apply</code> based on the given DOM root node, specifically the special
     * kind of <code>Apply</code> that is an XML ConditionType (the root of the condition logic in
     * a RuleType). A ConditionType is an ApplyType whose FunctionId must return a boolean.
     *
     * @deprecated As of 2.0 you should avoid this method: it does not provide a
     *             <code>Condition</code> instance and does not handle XACML 2.0 policies
     *             correctly. Use the variant that accepts a <code>VariableManager</code>; this
     *             one returns an <code>Apply</code> for XACML 1.x policies only.
     *
     * @param root the DOM root of a ConditionType XML type
     * @param xpathVersion the XPath version to use in any selectors or XPath functions, or null if
     *            this is unspecified (ie, not supplied in the defaults section of the policy)
     *
     * @throws ParsingException if this is not a valid ConditionType
     * @return an instance of Apply based on the given DOM root node
     */
    public static Apply getConditionInstance(Node root, String xpathVersion)
            throws ParsingException {
        PolicyMetaData metaData =
                new PolicyMetaData(XACMLConstants.XACML_1_0_IDENTIFIER, xpathVersion);
        // no VariableManager available through this legacy entry point
        return getInstance(root, FunctionFactory.getConditionInstance(), metaData, null);
    }

    /**
     * Returns an instance of <code>Apply</code> based on the given DOM root.
     *
     * @param root the DOM root of an ApplyType XML type
     * @param metaData the meta-data associated with the containing policy
     * @param manager <code>VariableManager</code> used to connect references and definitions while
     *            parsing
     *
     * @throws ParsingException if this is not a valid ApplyType
     * @return an instance of Apply based on the given DOM root node
     */
    public static Apply getInstance(Node root, PolicyMetaData metaData, VariableManager manager)
            throws ParsingException {
        return getInstance(root, FunctionFactory.getGeneralInstance(), metaData, manager);
    }

    /**
     * Returns an instance of <code>Apply</code> based on the given DOM root.
     *
     * @deprecated As of 2.0 you should avoid this method: it does not handle XACML 2.0 policies
     *             correctly. Use the variant that accepts a <code>VariableManager</code>; this
     *             one returns an <code>Apply</code> for XACML 1.x policies only.
     *
     * @param root the DOM root of an ApplyType XML type
     * @param xpathVersion the XPath version to use in any selectors or XPath functions, or null if
     *            this is unspecified (ie, not supplied in the defaults section of the policy)
     * @return an instance of Apply based on the given DOM root node
     *
     * @throws ParsingException if this is not a valid ApplyType
     */
    public static Apply getInstance(Node root, String xpathVersion) throws ParsingException {
        PolicyMetaData metaData =
                new PolicyMetaData(XACMLConstants.XACML_1_0_IDENTIFIER, xpathVersion);
        return getInstance(root, FunctionFactory.getGeneralInstance(), metaData, null);
    }

    /**
     * Shared helper for the getInstance methods; the factory determines which kind of
     * function (general vs condition) is resolved from the DOM node.
     */
    private static Apply getInstance(Node root, FunctionFactory factory, PolicyMetaData metaData,
            VariableManager manager) throws ParsingException {
        Function function = ExpressionHandler.getFunction(root, metaData, factory);

        List arguments = new ArrayList();
        NodeList children = root.getChildNodes();
        for (int idx = 0; idx < children.getLength(); idx++) {
            // non-Expression child nodes (text, comments, ...) parse to null and are skipped
            Expression parsed = ExpressionHandler.parseExpression(children.item(idx), metaData, manager);
            if (parsed != null) {
                arguments.add(parsed);
            }
        }

        return new Apply(function, arguments);
    }

    /**
     * Returns the <code>Function</code> used by this <code>Apply</code>.
     *
     * @return the <code>Function</code>
     */
    public Function getFunction() {
        return function;
    }

    /**
     * Returns the <code>List</code> of children for this <code>Apply</code>. The list contains
     * <code>Expression</code>s, is unmodifiable, and may be empty.
     *
     * @return a <code>List</code> of <code>Expression</code>s
     */
    public List getChildren() {
        return xprs;
    }

    /**
     * Returns whether or not this ApplyType is actually a ConditionType. As of 2.0 this always
     * returns false.
     *
     * @deprecated As of 2.0 this method should not be used, since an <code>Apply</code> is never a
     *             Condition.
     *
     * @return false
     */
    public boolean isCondition() {
        return false;
    }

    /**
     * Evaluates the apply object using the given function. This will in turn call evaluate on all
     * the given parameters, some of which may be other <code>Apply</code> objects.
     *
     * @param context the representation of the request
     *
     * @return the result of trying to evaluate this apply object
     */
    public EvaluationResult evaluate(EvaluationCtx context) {
        // Prior to 2.0 this pre-evaluated higher-order functions; since a function need
        // not be at the start of an Apply, higher-order functions now evaluate their
        // own parameters and no assumptions are made here.
        return function.evaluate(xprs, context);
    }

    /**
     * Returns the type of attribute that this object will return on a call to
     * <code>evaluate</code>. In practice this is always the same as calling
     * <code>getReturnType</code> on the function used by this object.
     *
     * @return the type returned by <code>evaluate</code>
     */
    public URI getType() {
        return function.getReturnType();
    }

    /**
     * Returns whether or not the <code>Function</code> will return a bag of values on evaluation.
     *
     * @return true if evaluation will return a bag of values, false otherwise
     */
    public boolean returnsBag() {
        return function.returnsBag();
    }

    /**
     * Returns whether or not the <code>Function</code> will return a bag of values on evaluation.
     *
     * @deprecated As of 2.0, use <code>returnsBag</code> from the super-interface
     *             <code>Expression</code> instead.
     *
     * @return true if evaluation will return a bag of values, false otherwise
     */
    public boolean evaluatesToBag() {
        return function.returnsBag();
    }

    /**
     * Encodes this <code>Apply</code> into its XML form.
     *
     * @return <code>String</code>
     */
    public String encode() {
        StringBuilder out = new StringBuilder();
        encode(out);
        return out.toString();
    }

    /**
     * Encodes this <code>Apply</code> into its XML form and writes it to the provided
     * <code>StringBuilder</code>.
     *
     * @param builder string stream into which the XML-encoded data is written
     */
    public void encode(StringBuilder builder) {
        builder.append("<Apply FunctionId=\"").append(function.getIdentifier()).append("\">\n");
        for (Object xpr : xprs) {
            ((Expression) xpr).encode(builder);
        }
        builder.append("</Apply>\n");
    }
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.instance;
import com.hazelcast.config.Config;
import com.hazelcast.config.XmlConfigBuilder;
import com.hazelcast.core.DuplicateInstanceNameException;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.Member;
import com.hazelcast.internal.jmx.ManagementService;
import com.hazelcast.spi.annotation.PrivateApi;
import com.hazelcast.spi.properties.GroupProperty;
import com.hazelcast.util.EmptyStatement;
import com.hazelcast.util.ExceptionUtil;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import static com.hazelcast.core.LifecycleEvent.LifecycleState.STARTED;
import static com.hazelcast.util.Preconditions.checkHasText;
import static com.hazelcast.util.SetUtil.createHashSet;
import static java.lang.String.format;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
 * Central manager for all Hazelcast members of the JVM.
 *
 * All creation functionality will be stored here and a particular instance of a member will delegate here.
 * <p>
 * Instances are tracked in a static registry keyed by instance name; values are futures so
 * that concurrent creators/readers of the same name block until construction completes
 * (or fails) rather than observing a half-built instance.
 */
@PrivateApi
@SuppressWarnings("SynchronizationOnStaticField")
public final class HazelcastInstanceFactory {

// Extra delay applied to non-first members after the initial wait, giving the first
// member time to complete the initial partition arrangement before others proceed.
private static final int ADDITIONAL_SLEEP_SECONDS_FOR_NON_FIRST_MEMBERS = 4;

// Monotonically increasing id used to build unique default instance names.
private static final AtomicInteger FACTORY_ID_GEN = new AtomicInteger();

// Registry of all instances created in this JVM, keyed by instance name.
private static final ConcurrentMap<String, InstanceFuture> INSTANCE_MAP = new ConcurrentHashMap<String, InstanceFuture>(5);

// Utility class: never instantiated.
private HazelcastInstanceFactory() {
}

/**
 * Returns all fully-constructed Hazelcast instances registered in this JVM.
 * NOTE(review): InstanceFuture.get() throws IllegalStateException for an instance whose
 * construction failed; unlike getHazelcastInstance(String), that exception is not caught
 * here — confirm callers expect it to propagate.
 */
public static Set<HazelcastInstance> getAllHazelcastInstances() {
Set<HazelcastInstance> result = createHashSet(INSTANCE_MAP.size());
for (InstanceFuture f : INSTANCE_MAP.values()) {
result.add(f.get());
}
return result;
}

/**
 * Looks up a Hazelcast instance by name, blocking until any in-progress construction
 * for that name completes. Returns null if the name is unknown or construction failed.
 */
public static HazelcastInstance getHazelcastInstance(String instanceName) {
InstanceFuture instanceFuture = INSTANCE_MAP.get(instanceName);
if (instanceFuture == null) {
return null;
}
try {
return instanceFuture.get();
} catch (IllegalStateException t) {
// construction of that instance failed; treat as "not found"
return null;
}
}

/**
 * Returns the existing instance named in the config, or creates it if absent.
 * The config must carry a non-empty instance name.
 */
public static HazelcastInstance getOrCreateHazelcastInstance(Config config) {
if (config == null) {
// fall back to the XML defaults when no config is supplied
config = new XmlConfigBuilder().build();
}
String name = config.getInstanceName();
checkHasText(name, "instanceName must contain text");
// fast path: instance (or in-flight construction) already registered
InstanceFuture future = INSTANCE_MAP.get(name);
if (future != null) {
return future.get();
}
future = new InstanceFuture();
// another thread may register the same name concurrently; loser adopts the winner's future
InstanceFuture found = INSTANCE_MAP.putIfAbsent(name, future);
if (found != null) {
return found.get();
}
try {
return constructHazelcastInstance(config, name, new DefaultNodeContext(), future);
} catch (Throwable t) {
// unregister first, then fail the future so blocked waiters wake with the cause
INSTANCE_MAP.remove(name, future);
future.setFailure(t);
throw ExceptionUtil.rethrow(t);
}
}

/**
 * Creates a new Hazelcast instance.
 *
 * @param config the configuration to use; if <code>null</code>, the set of defaults
 *               as specified in the XSD for the configuration XML will be used.
 * @return the configured {@link HazelcastInstance}
 */
public static HazelcastInstance newHazelcastInstance(Config config) {
if (config == null) {
config = new XmlConfigBuilder().build();
}
return newHazelcastInstance(
config,
config.getInstanceName(),
new DefaultNodeContext()
);
}

/**
 * Generates a unique default instance name from the factory counter and the group name.
 */
public static String createInstanceName(Config config) {
return "_hzInstance_" + FACTORY_ID_GEN.incrementAndGet() + "_" + config.getGroupConfig().getName();
}

/**
 * Return real name for the hazelcast instance's instance
 *
 * @param instanceName - template of the name; a generated name is used when null/blank
 * @param config - config
 * @return - real hazelcast instance's name
 */
public static String getInstanceName(String instanceName, Config config) {
String name = instanceName;
if (name == null || name.trim().length() == 0) {
name = createInstanceName(config);
}
return name;
}

/**
 * Creates a new Hazelcast instance.
 *
 * @param config the configuration to use; if <code>null</code>, the set of defaults
 *               as specified in the XSD for the configuration XML will be used.
 * @param instanceName the name of the {@link HazelcastInstance}
 * @param nodeContext the {@link NodeContext} to use
 * @return the configured {@link HazelcastInstance}
 * @throws DuplicateInstanceNameException if an instance with the same name already exists
 */
public static HazelcastInstance newHazelcastInstance(Config config, String instanceName, NodeContext nodeContext) {
if (config == null) {
config = new XmlConfigBuilder().build();
}
String name = getInstanceName(instanceName, config);
InstanceFuture future = new InstanceFuture();
// unlike getOrCreateHazelcastInstance, an existing registration is an error here
if (INSTANCE_MAP.putIfAbsent(name, future) != null) {
throw new DuplicateInstanceNameException("HazelcastInstance with name '" + name + "' already exists!");
}
try {
return constructHazelcastInstance(config, name, nodeContext, future);
} catch (Throwable t) {
// unregister first, then fail the future so blocked waiters wake with the cause
INSTANCE_MAP.remove(name, future);
future.setFailure(t);
throw ExceptionUtil.rethrow(t);
}
}

// Wraps the freshly built impl in the proxy handed out to user code.
private static HazelcastInstanceProxy newHazelcastProxy(HazelcastInstanceImpl hazelcastInstance) {
return new HazelcastInstanceProxy(hazelcastInstance);
}

/**
 * Builds the instance, applies the configured startup waits, registers it for
 * OOM dispatch, completes the future, and fires the STARTED lifecycle event.
 * The context classloader is temporarily replaced when none is set, and always restored.
 */
private static HazelcastInstanceProxy constructHazelcastInstance(Config config, String instanceName, NodeContext nodeContext,
InstanceFuture future) {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
HazelcastInstanceProxy proxy;
try {
if (classLoader == null) {
// ensure Hazelcast's own classes are resolvable during startup
Thread.currentThread().setContextClassLoader(HazelcastInstanceFactory.class.getClassLoader());
}
HazelcastInstanceImpl hazelcastInstance = new HazelcastInstanceImpl(instanceName, config, nodeContext);
OutOfMemoryErrorDispatcher.registerServer(hazelcastInstance);
proxy = newHazelcastProxy(hazelcastInstance);
Node node = hazelcastInstance.node;
boolean firstMember = isFirstMember(node);
long initialWaitSeconds = node.getProperties().getSeconds(GroupProperty.INITIAL_WAIT_SECONDS);
if (initialWaitSeconds > 0) {
hazelcastInstance.logger.info(format("Waiting %d seconds before completing HazelcastInstance startup...",
initialWaitSeconds));
try {
SECONDS.sleep(initialWaitSeconds);
if (firstMember) {
// first member triggers the initial partition arrangement
node.partitionService.firstArrangement();
} else {
// give the first member a head start before this member continues
SECONDS.sleep(ADDITIONAL_SLEEP_SECONDS_FOR_NON_FIRST_MEMBERS);
}
} catch (InterruptedException ignored) {
// deliberately best-effort: an interrupt just shortens the startup wait
EmptyStatement.ignore(ignored);
}
}
awaitMinimalClusterSize(hazelcastInstance, node, firstMember);
// publish the proxy before firing STARTED so waiters see a usable instance
future.set(proxy);
hazelcastInstance.lifecycleService.fireLifecycleEvent(STARTED);
} catch (Throwable t) {
throw ExceptionUtil.rethrow(t);
} finally {
// always restore the caller's context classloader
Thread.currentThread().setContextClassLoader(classLoader);
}
return proxy;
}

// A member is "first" when it is the first entry in the cluster member list.
private static boolean isFirstMember(Node node) {
Iterator<Member> iterator = node.getClusterService().getMembers().iterator();
return (iterator.hasNext() && iterator.next().localMember());
}

/**
 * Blocks until the cluster reaches the configured minimum size, polling once per second.
 * NOTE(review): interrupts during the poll loop are swallowed and the loop continues;
 * only the final sleep(3) propagates InterruptedException — confirm this is intended.
 */
private static void awaitMinimalClusterSize(HazelcastInstanceImpl hazelcastInstance, Node node, boolean firstMember)
throws InterruptedException {
int initialMinClusterSize = node.getProperties().getInteger(GroupProperty.INITIAL_MIN_CLUSTER_SIZE);
while (node.getClusterService().getSize() < initialMinClusterSize) {
try {
hazelcastInstance.logger.info("HazelcastInstance waiting for cluster size of " + initialMinClusterSize);
// noinspection BusyWait
SECONDS.sleep(1);
} catch (InterruptedException ignored) {
EmptyStatement.ignore(ignored);
}
}
if (initialMinClusterSize > 1) {
if (firstMember) {
// first member triggers the initial partition arrangement
node.partitionService.firstArrangement();
} else {
SECONDS.sleep(3);
}
hazelcastInstance.logger.info("HazelcastInstance starting after waiting for cluster size of "
+ initialMinClusterSize);
}
}

/** Gracefully shuts down every registered instance in this JVM. */
public static void shutdownAll() {
shutdownAll(false);
}

/** Forcefully terminates every registered instance in this JVM. */
public static void terminateAll() {
shutdownAll(true);
}

/**
 * Shuts down (or terminates) all registered instances in name order, clearing the
 * registry and the OOM dispatcher first, and severing each proxy from its impl.
 */
private static void shutdownAll(boolean terminate) {
List<HazelcastInstanceProxy> instances = new LinkedList<HazelcastInstanceProxy>();
for (InstanceFuture future : INSTANCE_MAP.values()) {
try {
HazelcastInstanceProxy instanceProxy = future.get();
instances.add(instanceProxy);
} catch (RuntimeException ignored) {
// an instance whose construction failed has nothing to shut down
EmptyStatement.ignore(ignored);
}
}
INSTANCE_MAP.clear();
OutOfMemoryErrorDispatcher.clearServers();
ManagementService.shutdownAll(instances);
// deterministic shutdown order: sort by instance name
Collections.sort(instances, new Comparator<HazelcastInstanceProxy>() {
public int compare(HazelcastInstanceProxy o1, HazelcastInstanceProxy o2) {
return o1.getName().compareTo(o2.getName());
}
});
for (HazelcastInstanceProxy proxy : instances) {
if (terminate) {
proxy.getLifecycleService().terminate();
} else {
proxy.getLifecycleService().shutdown();
}
// detach the proxy from the impl so the impl can be garbage collected
proxy.original = null;
}
}

/**
 * Removes one instance from the registry (called when that instance shuts down);
 * when it was the last one, shuts down the JMX management service as well.
 */
public static void remove(HazelcastInstanceImpl instance) {
OutOfMemoryErrorDispatcher.deregisterServer(instance);
InstanceFuture future = INSTANCE_MAP.remove(instance.getName());
if (future != null && future.isSet()) {
// detach the proxy from the impl so the impl can be garbage collected
future.get().original = null;
}
if (INSTANCE_MAP.size() == 0) {
ManagementService.shutdown(instance.getName());
}
}

/**
 * Hand-rolled single-value future: get() blocks until set(...) or setFailure(...) is
 * called. Completed values are read lock-free via the volatile fields.
 */
private static class InstanceFuture {
private volatile HazelcastInstanceProxy hz;
private volatile Throwable throwable;

HazelcastInstanceProxy get() {
// fast path: already completed successfully
if (hz != null) {
return hz;
}
boolean restoreInterrupt = false;
synchronized (this) {
while (hz == null && throwable == null) {
try {
wait();
} catch (InterruptedException ignored) {
// keep waiting, but remember to restore the interrupt status afterwards
restoreInterrupt = true;
}
}
}
if (restoreInterrupt) {
Thread.currentThread().interrupt();
}
if (hz != null) {
return hz;
}
// completed exceptionally: surface the construction failure as the cause
throw new IllegalStateException(throwable);
}

// Completes the future successfully and wakes all waiters.
void set(HazelcastInstanceProxy proxy) {
synchronized (this) {
this.hz = proxy;
notifyAll();
}
}

// Completes the future exceptionally and wakes all waiters.
public void setFailure(Throwable throwable) {
synchronized (this) {
this.throwable = throwable;
notifyAll();
}
}

// True once set(...) has been called; a failed future is never "set".
boolean isSet() {
return hz != null;
}
}
}
| |
package com.backend.collab;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.eclipse.jetty.websocket.api.Session;
import org.eclipse.jetty.websocket.api.annotations.OnWebSocketClose;
import org.eclipse.jetty.websocket.api.annotations.OnWebSocketConnect;
import org.eclipse.jetty.websocket.api.annotations.OnWebSocketError;
import org.eclipse.jetty.websocket.api.annotations.OnWebSocketMessage;
import org.eclipse.jetty.websocket.api.annotations.WebSocket;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.backend.collab.SessionManager;
import static com.backend.collab.SessionManager.getISM;
/**
 * Implements the websocket for maintaining shared sessions; passes received messages
 * onto the SessionManager instance (via {@code getISM()}) after minimal parsing.
 * <p>
 * Every incoming message is a JSON object with a mandatory {@code "type"} field and,
 * for most request types, a {@code "params"} object carrying the payload.
 *
 * @author root
 */
@WebSocket(maxTextMessageSize = 64 * 1024)
public class WebSocketHandler
{
    static Logger logger = Logger.getLogger(WebSocketHandler.class.getName());
    private final JsonParser jp = new JsonParser();

    /** Registers a newly opened websocket with the session manager. */
    @OnWebSocketConnect
    public void open(Session session)
    {
        getISM().openSocket(session);
    }

    /** Deregisters a closed websocket with the session manager. */
    @OnWebSocketClose
    public void onClose(Session session, int status, String reason) {
        // route through the class logger instead of System.out so close events
        // land in the configured log file alongside everything else
        logger.log(Level.INFO, "Close: statusCode=" + status + ", reason=" + reason);
        getISM().closeSocket(session);
    }

    /** Logs unexpected websocket transport errors. */
    @OnWebSocketError
    public void onError(Throwable error)
    {
        // log with the throwable instead of printStackTrace() so the stack trace
        // is captured by the logging configuration rather than written to stderr
        logger.log(Level.SEVERE, "~~~~~~~~~~~~~~~~~~! SERIOUS ERROR !~~~~~~~~~~~~~~~~~~", error);
    }

    /**
     * The function invoked by the websocket whenever a new message is received.
     * Dispatches on the message's {@code "type"} field; requests with no payload are
     * handled first, then the {@code params} requirement is checked once for all
     * remaining types (the original code repeated the null-check in every case).
     *
     * @param message - the JSON encoded message
     * @param session - the websocket session
     */
    @OnWebSocketMessage
    public void handleMessage(Session session, String message)
    {
        try
        {
            // parse the stringified message into a JSON object again
            JsonObject msg = jp.parse(message).getAsJsonObject();
            JsonObject params = msg.getAsJsonObject("params");

            // no message type specified, do nothing — all messages have a type
            JsonElement ele = msg.get("type");
            if (ele == null) return;
            String type = ele.getAsString();

            if (SessionManager.DEBUG)
            {
                logger.log(Level.INFO, "Request type: "+type+" "+msg.toString());
            }

            // filter TEST messages
            if (type.startsWith("TEST"))
            {
                getISM().testSync(session,type,params);
                return;
            }

            // requests that carry no payload object
            switch (type)
            {
                case "get-user-groups":
                    getISM().getUserGroups(session);
                    return;
                case "end-session":
                    getISM().closeSession(session);
                    return;
                case "get-user-files":
                    getISM().getUserFiles(session);
                    return;
                case "get-public-files":
                    getISM().getPublicFiles(session);
                    return;
                case "get-public-sessions":
                    getISM().getPublicSessions(session);
                    return;
                case "logout-user":
                    getISM().doLogout(session);
                    return;
                case "leave-session":
                    getISM().leaveSession(session);
                    return;
                default:
                    break;
            }

            // every remaining request type requires a params object
            if (params == null)
                return;

            switch (type)
            {
                case "schat":
                case "update-status":
                case "update-tiers":
                    getISM().notifySessionUsers(session, params);
                    break;
                case "update-saved-config":
                    getISM().updateSavedConfig(session,params);
                    break;
                case "session-nominate-leader":
                    getISM().setSessionLeader(session, params);
                    break;
                case "register-remote-source":
                    getISM().registerRemoteSource(session, params);
                    break;
                case "session-blacklist-update":
                    getISM().setSessionBlacklist(session, params);
                    break;
                case "session-invite-user":
                    getISM().doSessionInviteUser(session, params);
                    break;
                case "session-kick-user":
                    getISM().doSessionKickUser(session, params);
                    break;
                case "reset-user-password":
                    getISM().resetUserPassword(session, params);
                    break;
                case "change-user-password":
                    getISM().changeUserPassword(session, params);
                    break;
                case "change-user-email":
                    getISM().changeUserEmail(session, params);
                    break;
                case "change-user-nickname":
                    getISM().changeUserNickname(session, params);
                    break;
                case "send-group-message":
                    getISM().sendGroupEmail(session, params);
                    break;
                // NOTE(review): "leave-group" and "remove-group-user" both dispatch to
                // removeGroupUser — presumably intentional (self-leave vs admin-kick);
                // confirm against SessionManager.
                case "leave-group":
                    getISM().removeGroupUser(session, params);
                    break;
                case "remove-group-file-acl":
                    getISM().removeGroupFileACL(session, params);
                    break;
                case "set-group-user-acl":
                    getISM().updateGroupUserACL(session, params);
                    break;
                case "remove-group-user":
                    getISM().removeGroupUser(session, params);
                    break;
                case "create-group":
                    getISM().createGroup(session, params);
                    break;
                case "delete-group":
                    getISM().deleteGroup(session, params);
                    break;
                case "add-group-user":
                    getISM().addUserToGroup(session, params);
                    break;
                case "share-group-file":
                    getISM().shareGroupFile(session, params);
                    break;
                case "get-group-info":
                    getISM().getGroupInfo(session, params);
                    break;
                case "get-all-comments":
                    getISM().getAllComments(session, params);
                    break;
                case "delete-comment":
                    getISM().deleteComment(session, params);
                    break;
                case "make-comment":
                    getISM().makeComment(session, params);
                    break;
                case "leader-options":
                    getISM().sessionSetOption(session, params);
                    break;
                case "resume-session":
                    getISM().doResumeSession(session, params);
                    break;
                case "create-session":
                    getISM().newSession(session,params);
                    break;
                case "join-session":
                    getISM().joinSession(session, params);
                    break;
                case "register-user":
                    getISM().doRegister(session, params);
                    break;
                case "delete-user-file":
                    getISM().deleteUserFile(session, params);
                    break;
                case "toggle-user-file-public":
                    getISM().setUserFilePublicStatus(session, params);
                    break;
                case "login-user":
                    getISM().doLogin(session, params);
                    break;
                default:
                    // unknown type: silently ignored, matching the original behavior
                    break;
            }
        }
        catch (Exception e)
        {
            // logs the exceptions in /opt/tomcat8/logs/catalina.out
            logger.log(Level.SEVERE, null, e);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.hops.erasure_coding;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.util.ReflectionUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* A class with the information of a raid codec.
* A raid codec has the information of
* 1. Which ErasureCode used
* 2. Stripe and parity length
* 3. Parity directory location
* 4. Codec priority
*/
public class Codec implements Serializable {
public static final Log LOG = LogFactory.getLog(Codec.class);
public static final String ERASURE_CODE_KEY_PREFIX =
"hdfs.raid.erasure.code.";
/**
* Used by ErasureCode.init() to get Code specific extra parameters.
*/
public final JSONObject json;
/**
* id of the codec. Used by policy in raid.xml
*/
public final String id;
/**
* Number of blocks in one stripe
*/
public final int stripeLength;
/**
* Number of parity blocks of the codec for one stripe
*/
public final int parityLength;
/**
* The full class name of the ErasureCode used
*/
public final String erasureCodeClass;
/**
* Human readable description of the codec
*/
public final String description;
/**
* Where to store the parity files
*/
public final String parityDirectory;
/**
* Priority of the codec.
* <p/>
* Purge parity files:
* When parity files of two Codecs exists, the parity files of the lower
* priority codec will be purged.
* <p/>
* Generating parity files:
* When a source files are under two policies, the policy with a higher
* codec priority will be triggered.
*/
public final int priority;
private static List<Codec> codecs;
private static Map<String, Codec> idToCodec;
/**
 * Get the single instance of the list of codecs ordered by priority (highest first).
 *
 * @return the unmodifiable codec list built by {@code initializeCodecs}
 */
public static List<Codec> getCodecs() {
return Codec.codecs;
}
/**
 * Get the instance of the codec by id.
 *
 * @param id the codec id as used in policy files (raid.xml)
 * @return the matching codec, or null if no codec with that id is configured
 */
public static Codec getCodec(String id) {
return idToCodec.get(id);
}
// Class initializer: register the Hadoop/erasure-coding config resources (order
// matters — later resources override earlier ones) and load the codec list from
// the resulting configuration. Any failure aborts class loading.
static {
try {
Configuration.addDefaultResource("hdfs-default.xml");
Configuration.addDefaultResource("hdfs-site.xml");
Configuration.addDefaultResource("erasure-coding-default.xml");
Configuration.addDefaultResource("erasure-coding-site.xml");
initializeCodecs(new Configuration());
} catch (Exception e) {
throw new RuntimeException("Failed to initialize erasure coding codecs",
e);
}
}
/**
 * (Re)builds the static codec registry from the JSON array stored under
 * {@code DFSConfigKeys.ERASURE_CODING_CODECS_KEY} in the given configuration.
 * When the key is absent, the registry is left empty. Codecs are sorted by
 * descending priority and published as unmodifiable collections.
 *
 * @param conf configuration to read the codec JSON from
 * @throws IOException if the configured value is not valid codec JSON
 */
public static void initializeCodecs(Configuration conf) throws IOException {
try {
String source = conf.get(DFSConfigKeys.ERASURE_CODING_CODECS_KEY);
if (source == null) {
codecs = Collections.emptyList();
idToCodec = Collections.emptyMap();
if (LOG.isDebugEnabled()) {
// Fix: this message was guarded by isDebugEnabled() but emitted at INFO
// level; log at DEBUG so the guard and the call level agree.
LOG.debug("No codec is specified");
}
return;
}
JSONArray jsonArray = new JSONArray(source);
// build into locals first so the static fields are published fully formed
List<Codec> localCodecs = new ArrayList<Codec>();
Map<String, Codec> localIdToCodec = new HashMap<String, Codec>();
for (int i = 0; i < jsonArray.length(); ++i) {
Codec codec = new Codec(jsonArray.getJSONObject(i));
localIdToCodec.put(codec.id, codec);
localCodecs.add(codec);
}
Collections.sort(localCodecs, new Comparator<Codec>() {
@Override
public int compare(Codec c1, Codec c2) {
// Higher priority on top
return c2.priority - c1.priority;
}
});
codecs = Collections.unmodifiableList(localCodecs);
idToCodec = Collections.unmodifiableMap(localIdToCodec);
} catch (JSONException e) {
// surface malformed codec JSON as an IOException, preserving the cause
throw new IOException(e);
}
}
/**
 * Builds a Codec from one element of the codec configuration JSON array.
 * Required keys: "id", "parity_length", "stripe_length", "erasure_code",
 * "parity_dir", "priority"; "description" is optional and defaults to "".
 * NOTE: the read order below determines which missing key is reported first,
 * so do not reorder these assignments.
 *
 * @throws JSONException if a required key is missing or has the wrong type
 */
public Codec(JSONObject json) throws JSONException {
this.json = json;
this.id = json.getString("id");
this.parityLength = json.getInt("parity_length");
this.stripeLength = json.getInt("stripe_length");
this.erasureCodeClass = json.getString("erasure_code");
this.parityDirectory = json.getString("parity_dir");
this.priority = json.getInt("priority");
this.description = getJSONString(json, "description", "");
// Rejects parity directories that are not absolute or end with a separator.
checkDirectory(parityDirectory);
}
/**
 * Ensures the directory string has the form "/a/b/c": it must start with the
 * path separator and must not end with one.
 *
 * @throws IllegalArgumentException when the directory is malformed
 */
private void checkDirectory(String d) {
    if (!d.startsWith(Path.SEPARATOR) || d.endsWith(Path.SEPARATOR)) {
        throw new IllegalArgumentException("Bad directory:" + d);
    }
}
/**
 * Reads an optional string value from the JSON object, falling back to
 * defaultResult when the key is absent or not readable as a string.
 */
static private String getJSONString(JSONObject json, String key,
                                    String defaultResult) {
    try {
        return json.getString(key);
    } catch (JSONException e) {
        // A missing/invalid optional key is expected: use the default.
        return defaultResult;
    }
}
/**
 * Instantiates the ErasureCode implementation for this codec. A per-codec
 * override may be configured under ERASURE_CODE_KEY_PREFIX + id; otherwise
 * the class named by erasureCodeClass is used.
 */
public ErasureCode createErasureCode(Configuration conf) {
    final Class<?> implClass;
    try {
        implClass = conf.getClass(ERASURE_CODE_KEY_PREFIX + this.id,
            conf.getClassByName(this.erasureCodeClass));
    } catch (ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
    ErasureCode code = (ErasureCode) ReflectionUtils.newInstance(implClass, conf);
    code.init(this);
    return code;
}
@Override
public String toString() {
    // Codecs constructed in tests may carry no backing JSON object.
    return (json == null) ? ("Test codec " + id) : json.toString();
}
/**
 * Returns the parity directory with a guaranteed trailing path separator.
 */
public String getParityPrefix() {
    final String dir = this.parityDirectory;
    return dir.endsWith(Path.SEPARATOR) ? dir : dir + Path.SEPARATOR;
}
/** Number of source (data) blocks per stripe. */
public int getStripeLength() {
return stripeLength;
}
/** Number of parity blocks generated per stripe. */
public int getParityLength() {
return parityLength;
}
/** Unique identifier of this codec. */
public String getId() {
return id;
}
}
| |
package com.indeed.proctor.store;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import com.indeed.proctor.common.model.TestMatrixVersion;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNLogEntry;
import org.tmatesoft.svn.core.SVNNodeKind;
import org.tmatesoft.svn.core.SVNRevisionProperty;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.wc.SVNClientManager;
import org.tmatesoft.svn.core.wc.SVNLogClient;
import org.tmatesoft.svn.core.wc.SVNRevision;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
/**
 * Proctor test-matrix store backed by a Subversion repository. Read operations
 * are implemented via "svn log" over the storage layout described below.
 */
public class SvnProctor extends FileBasedProctorStore {
    private static final Logger LOGGER = Logger.getLogger(SvnProctor.class);
    /* Storage Schema:
        ${svnPath}/
            test-definitions/
                test-name-one/
                    definition.json
                    metadata.json
                test-name-two/
                    definition.json
                    metadata.json
    */
    private final SVNClientManager clientManager;
    private final SVNRepository repo;
    private final SVNURL svnUrl;

    /**
     * Creates a store rooted at the given svn path, checking out into a fresh
     * temporary directory.
     */
    public SvnProctor(final String svnPath,
                      final String username,
                      final String password) throws IOException {
        this(new SvnPersisterCoreImpl(svnPath, username, password, Files.createTempDir()));
    }

    public SvnProctor(final SvnPersisterCore core) {
        super(core);
        this.clientManager = core.getClientManager();
        this.repo = core.getRepo();
        this.svnUrl = core.getSvnUrl();
    }

    /**
     * Returns up to {@code limit} revisions of the given test (skipping the
     * first {@code start}), measured from the repository HEAD.
     */
    @Override
    public List<Revision> getHistory(final String test, final int start, final int limit) throws StoreException {
        try {
            final long latestRevision = repo.getLatestRevision();
            return getHistory(test, String.valueOf(latestRevision), start, limit);
        } catch (final SVNException e) {
            // Bug fix: previously threw RuntimeException, bypassing the declared
            // StoreException contract; callers catching StoreException now see it.
            throw new StoreException.ReadException("Unable to get older revisions for " + test, e);
        }
    }

    /**
     * Returns up to {@code limit} revisions of the given test (skipping the
     * first {@code start}), measured from the supplied revision string.
     */
    @Override
    public List<Revision> getHistory(final String test, final String version, final int start, final int limit) throws StoreException {
        final Long revision = SvnPersisterCoreImpl.parseRevisionOrDie(version);
        try {
            // check path before executing svn log: a missing test has no history
            final String testPath = TEST_DEFINITIONS_DIRECTORY + "/" + test;
            final SVNNodeKind kind = repo.checkPath(testPath, revision);
            if (kind == SVNNodeKind.NONE) {
                return Collections.emptyList();
            }
            final String[] targetPaths = { testPath };
            final SVNRevision svnRevision = SVNRevision.create(revision);
            return getSVNLogs(targetPaths, svnRevision, start, limit);
        } catch (final SVNException e) {
            // Bug fix: honor the declared StoreException contract (was RuntimeException).
            throw new StoreException.ReadException("Unable to get older revisions for " + test + " r" + revision, e);
        }
    }

    /**
     * Returns the latest repository revision as a string, or "-1" when the
     * repository has no log entries.
     */
    @Override
    public String getLatestVersion() throws StoreException {
        try {
            final String[] targetPaths = { };
            final SVNRevision svnRevision = SVNRevision.HEAD;
            final SVNLogClient logClient = clientManager.getLogClient();
            final FilterableSVNLogEntryHandler handler = new FilterableSVNLogEntryHandler();
            // In order to get history in "descending" order, the startRevision should be the one closer to HEAD
            logClient.doLog(svnUrl, targetPaths, /* pegRevision */ SVNRevision.HEAD, svnRevision, SVNRevision.create(1),
                            /* stopOnCopy */ false, /* discoverChangedPaths */ false, /* includeMergedRevisions */ false,
                            /* limit */ 1,
                            new String[]{SVNRevisionProperty.LOG}, handler);
            final SVNLogEntry entry = handler.getLogEntries().size() > 0 ? handler.getLogEntries().get(0) : null;
            return entry == null ? "-1" : String.valueOf(entry.getRevision());
        } catch (final SVNException e) {
            // Bug fix: honor the declared StoreException contract (was RuntimeException).
            throw new StoreException.ReadException("Unable to get latest revision", e);
        }
    }

    @Override
    public boolean cleanUserWorkspace(final String username) {
        return getSvnCore().cleanUserWorkspace(username);
    }

    /**
     * Returns up to {@code limit} revisions of the whole matrix (skipping the
     * first {@code start}), measured from HEAD.
     */
    @Override
    public List<Revision> getMatrixHistory(final int start, final int limit) throws StoreException {
        final String[] targetPaths = { };
        return getSVNLogs(targetPaths, SVNRevision.HEAD, start, limit);
    }

    /**
     * Runs "svn log" over the given paths and maps the requested page
     * [start, start + limit) of entries to {@link Revision} objects.
     */
    private List<Revision> getSVNLogs(final String[] paths, final SVNRevision startRevision, final int start, final int limit) throws StoreException.ReadException {
        try {
            final SVNLogClient logClient = clientManager.getLogClient();
            final FilterableSVNLogEntryHandler handler = new FilterableSVNLogEntryHandler();
            // In order to get history in "descending" order, the startRevision should be the one closer to HEAD
            logClient.doLog(svnUrl, paths, /* pegRevision */ SVNRevision.HEAD, startRevision, SVNRevision.create(1),
                            /* stopOnCopy */ false, /* discoverChangedPaths */ false, /* includeMergedRevisions */ false,
                            /* limit */ start + limit,
                            new String[]{SVNRevisionProperty.LOG, SVNRevisionProperty.AUTHOR, SVNRevisionProperty.DATE}, handler);
            final List<SVNLogEntry> entries = handler.getLogEntries();
            final List<Revision> revisions;
            if (entries.size() <= start) {
                revisions = Collections.emptyList();
            } else {
                final int end = Math.min(start + limit, entries.size());
                revisions = Lists.newArrayListWithCapacity(end - start);
                for (int i = 0; i < end - start; i++) {
                    final SVNLogEntry entry = entries.get(start + i);
                    revisions.add(new Revision(String.valueOf(entry.getRevision()), entry.getAuthor(), entry.getDate(), entry.getMessage()));
                }
            }
            return revisions;
        } catch (final SVNException e) {
            // Bug fix: preserve the underlying SVNException as the cause (it was
            // previously dropped, losing the root failure).
            throw new StoreException.ReadException("Unable to get older revisions", e);
        }
    }

    /**
     * Sanity check: the repository must report at least one revision.
     */
    @Override
    public void verifySetup() throws StoreException {
        try {
            final long latestRevision = repo.getLatestRevision();
            if (latestRevision <= 0) {
                throw new StoreException("Found non-positive revision (" + latestRevision + ") for svn-path: " + repo.getLocation());
            }
        } catch (final SVNException e) {
            throw new StoreException("Failed to get latest revision for svn-path: " + repo.getLocation(), e);
        }
    }

    @Override
    public String toString() {
        return core.toString();
    }

    private SvnPersisterCore getSvnCore() {
        return (SvnPersisterCore) this.core;
    }

    /**
     * Interactive benchmark: reads the last N matrix revisions and reports the
     * elapsed time. The checked-out temp directory is always deleted afterwards.
     */
    public static void main(final String[] args) throws IOException {
        final String svnpath = System.console().readLine("svn path: ");
        final String svnuser = System.console().readLine("user: ");
        final String password = new String(System.console().readPassword("password: "));
        final boolean usecache = "y".equals(System.console().readLine("cache (y/n): "));
        final int num_revisions = Integer.parseInt(System.console().readLine("number of histories: "));
        final File tempDir = Files.createTempDir();
        try {
            final SvnPersisterCoreImpl core = new SvnPersisterCoreImpl(svnpath, svnuser, password, tempDir);
            final SvnPersisterCore core1;
            if (usecache) {
                core1 = new CachedSvnPersisterCore(core);
            } else {
                core1 = core;
            }
            final SvnProctor client = new SvnProctor(core1);
            System.out.println("Running load matrix for last " + num_revisions + " revisions");
            final long start = System.currentTimeMillis();
            final List<Revision> revisions = client.getMatrixHistory(0, num_revisions);
            for (final Revision rev : revisions) {
                // Load each matrix purely for timing; the value itself is discarded.
                final TestMatrixVersion matrix = client.getTestMatrix(rev.getRevision());
            }
            final long elapsed = System.currentTimeMillis() - start;
            System.out.println("Finished reading matrix history (" + revisions.size() + ") in " + elapsed + " ms");
            client.close();
        } catch (final StoreException e) {
            e.printStackTrace(System.err);
            LOGGER.error(e);
        } finally {
            System.out.println("Deleting temp dir : " + tempDir);
            FileUtils.deleteDirectory(tempDir);
        }
    }
}
| |
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.task;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import org.springframework.beans.BeanUtils;
import org.springframework.boot.context.properties.PropertyMapper;
import org.springframework.core.task.TaskDecorator;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
/**
* Builder that can be used to configure and create a {@link TaskExecutor}. Provides
* convenience methods to set common {@link ThreadPoolTaskExecutor} settings and register
* {@link #taskDecorator(TaskDecorator)}). For advanced configuration, consider using
* {@link TaskExecutorCustomizer}.
* <p>
* In a typical auto-configured Spring Boot application this builder is available as a
* bean and can be injected whenever a {@link TaskExecutor} is needed.
*
* @author Stephane Nicoll
* @since 2.1.0
*/
public class TaskExecutorBuilder {

    private final Integer queueCapacity;

    private final Integer corePoolSize;

    private final Integer maxPoolSize;

    private final Boolean allowCoreThreadTimeOut;

    private final Duration keepAlive;

    private final String threadNamePrefix;

    private final TaskDecorator taskDecorator;

    private final Set<TaskExecutorCustomizer> customizers;

    /**
     * Create a builder with every setting left unset.
     */
    public TaskExecutorBuilder() {
        this(null, null, null, null, null, null, null, null);
    }

    private TaskExecutorBuilder(Integer queueCapacity, Integer corePoolSize,
            Integer maxPoolSize, Boolean allowCoreThreadTimeOut, Duration keepAlive,
            String threadNamePrefix, TaskDecorator taskDecorator,
            Set<TaskExecutorCustomizer> customizers) {
        this.queueCapacity = queueCapacity;
        this.corePoolSize = corePoolSize;
        this.maxPoolSize = maxPoolSize;
        this.allowCoreThreadTimeOut = allowCoreThreadTimeOut;
        this.keepAlive = keepAlive;
        this.threadNamePrefix = threadNamePrefix;
        this.taskDecorator = taskDecorator;
        this.customizers = customizers;
    }

    /**
     * Set the capacity of the queue. An unbounded capacity does not increase the
     * pool and therefore ignores {@link #maxPoolSize(int) maxPoolSize}.
     * @param queueCapacity the queue capacity to set
     * @return a new builder instance
     */
    public TaskExecutorBuilder queueCapacity(int queueCapacity) {
        return new TaskExecutorBuilder(queueCapacity, this.corePoolSize,
                this.maxPoolSize, this.allowCoreThreadTimeOut, this.keepAlive,
                this.threadNamePrefix, this.taskDecorator, this.customizers);
    }

    /**
     * Set the core number of threads. Effectively the maximum number of threads
     * as long as the queue is not full.
     * <p>
     * Core threads can grow and shrink if {@link #allowCoreThreadTimeOut(boolean)}
     * is enabled.
     * @param corePoolSize the core pool size to set
     * @return a new builder instance
     */
    public TaskExecutorBuilder corePoolSize(int corePoolSize) {
        return new TaskExecutorBuilder(this.queueCapacity, corePoolSize,
                this.maxPoolSize, this.allowCoreThreadTimeOut, this.keepAlive,
                this.threadNamePrefix, this.taskDecorator, this.customizers);
    }

    /**
     * Set the maximum allowed number of threads. When the {@link #queueCapacity(int)
     * queue} is full, the pool can expand up to that size to accommodate the load.
     * <p>
     * Ignored when the {@link #queueCapacity(int) queue capacity} is unbounded.
     * @param maxPoolSize the max pool size to set
     * @return a new builder instance
     */
    public TaskExecutorBuilder maxPoolSize(int maxPoolSize) {
        return new TaskExecutorBuilder(this.queueCapacity, this.corePoolSize,
                maxPoolSize, this.allowCoreThreadTimeOut, this.keepAlive,
                this.threadNamePrefix, this.taskDecorator, this.customizers);
    }

    /**
     * Set whether core threads are allowed to time out. When enabled, this
     * enables dynamic growing and shrinking of the pool.
     * @param allowCoreThreadTimeOut if core threads are allowed to time out
     * @return a new builder instance
     */
    public TaskExecutorBuilder allowCoreThreadTimeOut(boolean allowCoreThreadTimeOut) {
        return new TaskExecutorBuilder(this.queueCapacity, this.corePoolSize,
                this.maxPoolSize, allowCoreThreadTimeOut, this.keepAlive,
                this.threadNamePrefix, this.taskDecorator, this.customizers);
    }

    /**
     * Set the time limit for which threads may remain idle before being terminated.
     * @param keepAlive the keep alive to set
     * @return a new builder instance
     */
    public TaskExecutorBuilder keepAlive(Duration keepAlive) {
        return new TaskExecutorBuilder(this.queueCapacity, this.corePoolSize,
                this.maxPoolSize, this.allowCoreThreadTimeOut, keepAlive,
                this.threadNamePrefix, this.taskDecorator, this.customizers);
    }

    /**
     * Set the prefix to use for the names of newly created threads.
     * @param threadNamePrefix the thread name prefix to set
     * @return a new builder instance
     */
    public TaskExecutorBuilder threadNamePrefix(String threadNamePrefix) {
        return new TaskExecutorBuilder(this.queueCapacity, this.corePoolSize,
                this.maxPoolSize, this.allowCoreThreadTimeOut, this.keepAlive,
                threadNamePrefix, this.taskDecorator, this.customizers);
    }

    /**
     * Set the {@link TaskDecorator} to use or {@code null} to not use any.
     * @param taskDecorator the task decorator to use
     * @return a new builder instance
     */
    public TaskExecutorBuilder taskDecorator(TaskDecorator taskDecorator) {
        return new TaskExecutorBuilder(this.queueCapacity, this.corePoolSize,
                this.maxPoolSize, this.allowCoreThreadTimeOut, this.keepAlive,
                this.threadNamePrefix, taskDecorator, this.customizers);
    }

    /**
     * Set the {@link TaskExecutorCustomizer TaskExecutorCustomizers} that should be
     * applied to the {@link ThreadPoolTaskExecutor}. Customizers are applied in the
     * order that they were added after builder configuration has been applied.
     * Setting this value replaces any previously configured customizers.
     * @param customizers the customizers to set
     * @return a new builder instance
     * @see #additionalCustomizers(TaskExecutorCustomizer...)
     */
    public TaskExecutorBuilder customizers(TaskExecutorCustomizer... customizers) {
        Assert.notNull(customizers, "Customizers must not be null");
        return customizers(Arrays.asList(customizers));
    }

    /**
     * Set the {@link TaskExecutorCustomizer TaskExecutorCustomizers} that should be
     * applied to the {@link ThreadPoolTaskExecutor}. Customizers are applied in the
     * order that they were added after builder configuration has been applied.
     * Setting this value replaces any previously configured customizers.
     * @param customizers the customizers to set
     * @return a new builder instance
     * @see #additionalCustomizers(TaskExecutorCustomizer...)
     */
    public TaskExecutorBuilder customizers(Iterable<TaskExecutorCustomizer> customizers) {
        Assert.notNull(customizers, "Customizers must not be null");
        return new TaskExecutorBuilder(this.queueCapacity, this.corePoolSize,
                this.maxPoolSize, this.allowCoreThreadTimeOut, this.keepAlive,
                this.threadNamePrefix, this.taskDecorator, append(null, customizers));
    }

    /**
     * Add {@link TaskExecutorCustomizer TaskExecutorCustomizers} that should be
     * applied to the {@link ThreadPoolTaskExecutor}. Customizers are applied in the
     * order that they were added after builder configuration has been applied.
     * @param customizers the customizers to add
     * @return a new builder instance
     * @see #customizers(TaskExecutorCustomizer...)
     */
    public TaskExecutorBuilder additionalCustomizers(
            TaskExecutorCustomizer... customizers) {
        Assert.notNull(customizers, "Customizers must not be null");
        return additionalCustomizers(Arrays.asList(customizers));
    }

    /**
     * Add {@link TaskExecutorCustomizer TaskExecutorCustomizers} that should be
     * applied to the {@link ThreadPoolTaskExecutor}. Customizers are applied in the
     * order that they were added after builder configuration has been applied.
     * @param customizers the customizers to add
     * @return a new builder instance
     * @see #customizers(TaskExecutorCustomizer...)
     */
    public TaskExecutorBuilder additionalCustomizers(
            Iterable<TaskExecutorCustomizer> customizers) {
        Assert.notNull(customizers, "Customizers must not be null");
        return new TaskExecutorBuilder(this.queueCapacity, this.corePoolSize,
                this.maxPoolSize, this.allowCoreThreadTimeOut, this.keepAlive,
                this.threadNamePrefix, this.taskDecorator,
                append(this.customizers, customizers));
    }

    /**
     * Build a new {@link ThreadPoolTaskExecutor} instance and configure it using
     * this builder.
     * @return a configured {@link ThreadPoolTaskExecutor} instance.
     * @see #build(Class)
     * @see #configure(ThreadPoolTaskExecutor)
     */
    public ThreadPoolTaskExecutor build() {
        return build(ThreadPoolTaskExecutor.class);
    }

    /**
     * Build a new {@link ThreadPoolTaskExecutor} instance of the specified type
     * and configure it using this builder.
     * @param <T> the type of task executor
     * @param taskExecutorClass the template type to create
     * @return a configured {@link ThreadPoolTaskExecutor} instance.
     * @see #build()
     * @see #configure(ThreadPoolTaskExecutor)
     */
    public <T extends ThreadPoolTaskExecutor> T build(Class<T> taskExecutorClass) {
        return configure(BeanUtils.instantiateClass(taskExecutorClass));
    }

    /**
     * Configure the provided {@link ThreadPoolTaskExecutor} instance using this
     * builder. Only settings that were explicitly set are applied.
     * @param <T> the type of task executor
     * @param taskExecutor the {@link ThreadPoolTaskExecutor} to configure
     * @return the task executor instance
     * @see #build()
     * @see #build(Class)
     */
    public <T extends ThreadPoolTaskExecutor> T configure(T taskExecutor) {
        PropertyMapper mapper = PropertyMapper.get().alwaysApplyingWhenNonNull();
        mapper.from(this.queueCapacity).to(taskExecutor::setQueueCapacity);
        mapper.from(this.corePoolSize).to(taskExecutor::setCorePoolSize);
        mapper.from(this.maxPoolSize).to(taskExecutor::setMaxPoolSize);
        mapper.from(this.keepAlive).asInt(Duration::getSeconds)
                .to(taskExecutor::setKeepAliveSeconds);
        mapper.from(this.allowCoreThreadTimeOut)
                .to(taskExecutor::setAllowCoreThreadTimeOut);
        mapper.from(this.threadNamePrefix).whenHasText()
                .to(taskExecutor::setThreadNamePrefix);
        mapper.from(this.taskDecorator).to(taskExecutor::setTaskDecorator);
        if (!CollectionUtils.isEmpty(this.customizers)) {
            for (TaskExecutorCustomizer customizer : this.customizers) {
                customizer.customize(taskExecutor);
            }
        }
        return taskExecutor;
    }

    // Returns an unmodifiable, insertion-ordered union of the existing set
    // (may be null) and the additions.
    private <T> Set<T> append(Set<T> set, Iterable<? extends T> additions) {
        Set<T> result = new LinkedHashSet<>();
        if (set != null) {
            result.addAll(set);
        }
        for (T addition : additions) {
            result.add(addition);
        }
        return Collections.unmodifiableSet(result);
    }
}
| |
package com.ilscipio.scipio.cms.template;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.GroovyUtil;
import org.ofbiz.base.util.ScriptUtil;
import org.ofbiz.base.util.Scriptlet;
import org.ofbiz.base.util.UtilGenerics;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilProperties;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.entity.condition.EntityCondition;
import org.ofbiz.entity.condition.EntityOperator;
import org.ofbiz.minilang.SimpleMethod;
import org.ofbiz.minilang.method.MethodContext;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.widget.WidgetWorker;
import org.ofbiz.widget.model.AbstractModelAction;
import org.ofbiz.widget.model.ModelScreen;
import org.ofbiz.widget.model.ScreenFactory;
import com.ilscipio.scipio.cms.CmsException;
import com.ilscipio.scipio.cms.CmsUtil;
import com.ilscipio.scipio.cms.control.CmsControlUtil;
import com.ilscipio.scipio.cms.data.CmsDataException;
import com.ilscipio.scipio.cms.data.CmsDataObject;
import com.ilscipio.scipio.cms.data.CmsEntityVisit;
import com.ilscipio.scipio.cms.data.CmsEntityVisit.CmsEntityVisitor;
import com.ilscipio.scipio.cms.data.CmsEntityVisit.VisitRelation;
import com.ilscipio.scipio.cms.data.CmsEntityVisit.VisitRelations;
import com.ilscipio.scipio.cms.data.CmsMajorObject;
import com.ilscipio.scipio.cms.data.CmsObjectCache;
import com.ilscipio.scipio.cms.data.CmsObjectCache.CacheEntry;
/**
* Script template
* <p>
* TODO: 2016: currently UI and such only supports *.groovy locations and groovy type.
* can easily support more...
*/
public class CmsScriptTemplate extends CmsComplexTemplate implements CmsMajorObject {
private static final long serialVersionUID = -1589382218994735791L;
private static final Debug.OfbizLogger module = Debug.getOfbizLogger(java.lang.invoke.MethodHandles.lookup().lookupClass());
/**
 * Fields recognized by service but not physically present in entity.
 */
public static final Set<String> virtualFields = CmsTemplate.virtualFields;
// Global caches keyed by template id and by template name.
private static final CmsObjectCache<CmsScriptTemplate> idCache = CmsObjectCache.getGlobalCache("cms.template.script.id");
private static final CmsObjectCache<CmsScriptTemplate> nameCache = CmsObjectCache.getGlobalCache("cms.template.script.name");
// WARN: if ever changed, there is code that does not check this bool...
public static final boolean STANDALONE_DEFAULT = true;
// Entity conditions matching standalone vs. dependent script templates.
// A null "standalone" column is treated as the STANDALONE_DEFAULT value, so
// the static block below folds nulls into the matching condition.
private static final EntityCondition standaloneCond;
private static final EntityCondition notStandaloneCond;
static {
if (STANDALONE_DEFAULT) {
standaloneCond = EntityCondition.makeCondition(EntityCondition.makeCondition("standalone", "Y"),
EntityOperator.OR,
EntityCondition.makeCondition("standalone", null));
notStandaloneCond = EntityCondition.makeCondition("standalone", "N");
} else {
standaloneCond = EntityCondition.makeCondition("standalone", "Y");
notStandaloneCond = EntityCondition.makeCondition(EntityCondition.makeCondition("standalone", "N"),
EntityOperator.OR,
EntityCondition.makeCondition("standalone", null));
}
}
// Location of the optional content-processor script (cms.properties).
private static final String processorLocation = UtilProperties.getPropertyValue("cms", "contentprocessor.script.location");
private static final ScriptExecutor processorScriptExecutor = makeProcessorExecutor();
private ScriptExecutor executor; // 2016: dedicated executor object
private CmsScriptTemplateAssoc assoc; // 2016: backreference to association
/** Wraps an existing entity value (no new record created). */
protected CmsScriptTemplate(GenericValue entity) {
super(entity);
}
/** Wraps an existing entity value and remembers the owning association. */
protected CmsScriptTemplate(GenericValue entity, CmsScriptTemplateAssoc assoc) {
super(entity);
this.assoc = assoc;
}
/** Creates a new script template; fields are normalized via checkFields(). */
public CmsScriptTemplate(Delegator delegator, Map<String, ?> fields) {
super(delegator, checkFields(fields, true));
}
/** Creates a new script template bound to the given association. */
protected CmsScriptTemplate(Delegator delegator, Map<String, ?> fields, CmsScriptTemplateAssoc assoc) {
super(delegator, checkFields(fields, true));
this.assoc = assoc;
}
// Copy constructor; the assoc backreference is not copied (left null) —
// NOTE(review): presumably the caller re-links it; confirm if changing.
protected CmsScriptTemplate(CmsScriptTemplate other, Map<String, Object> copyArgs) {
super(other, copyArgs);
}
/**
 * Updates this template's fields; the "standalone" flag is normalized
 * through checkFields() before being applied.
 */
@Override
public void update(Map<String, ?> fields, boolean setIfEmpty) {
super.update(checkFields(fields, false), setIfEmpty);
}
/** Returns a copy of this script template. */
@Override
public CmsScriptTemplate copy(Map<String, Object> copyArgs) throws CmsException {
return new CmsScriptTemplate(this, copyArgs);
}
/**
 * Creates a new script template or updates an existing one, depending on
 * whether fields contains a non-empty "scriptTemplateId".
 *
 * @param userLogin used to stamp "createdBy" on newly created templates
 * @param store when true the result is persisted before returning
 */
public static CmsScriptTemplate createUpdateScriptTemplate(Delegator delegator, Map<String, ?> fields,
GenericValue userLogin, boolean store) {
Map<String, Object> scriptMap = new HashMap<>(fields);
CmsScriptTemplate scriptTemplate;
String scriptTemplateId = (String) fields.get("scriptTemplateId");
if (UtilValidate.isNotEmpty(scriptTemplateId)) {
// Existing record: findByIdAlways presumably fails hard when the id does
// not resolve (name suggests so — TODO confirm), then apply the updates.
scriptTemplate = CmsScriptTemplate.getWorker().findByIdAlways(delegator, scriptTemplateId, false);
scriptTemplate.update(scriptMap, true);
} else {
// New record: stamp the creating user.
scriptMap.put("createdBy", userLogin.get("userLoginId"));
scriptTemplate = new CmsScriptTemplate(delegator, scriptMap);
}
if (store) {
scriptTemplate.store();
}
return scriptTemplate;
}
/**
 * 2016: Loads ALL this object's content into the current instance.
 * <p>
 * WARN: IMPORTANT: AFTER THIS CALL,
 * NO FURTHER CALLS ARE ALLOWED TO MODIFY THE INSTANCE IN MEMORY.
 * Essential for thread safety!!!
 */
@Override
public void preload(PreloadWorker preloadWorker) {
super.preload(preloadWorker);
// Resolving the executor here caches it on this instance (see getExecutor),
// so no lazy init happens after the instance is shared across threads.
this.getExecutor();
// NOTE: un-intuitively, we must do the preload for the assoc, because instances store us
// rather than the assoc itself!
preloadWorker.preload(this.assoc);
}
/**
 * Normalizes incoming field maps: for new records, or whenever the caller
 * supplies a "standalone" key, an empty value is replaced with the
 * STANDALONE_DEFAULT flag ("Y"/"N"). Returns the (possibly mutated) map.
 */
protected static <T> Map<String, T> checkFields(Map<String, T> fields, boolean isNew) throws CmsException {
    final boolean standaloneRelevant = isNew || fields.containsKey("standalone");
    if (standaloneRelevant && UtilValidate.isEmpty((String) fields.get("standalone"))) {
        UtilGenerics.<String, Object> checkMap(fields).put("standalone", STANDALONE_DEFAULT ? "Y" : "N");
    }
    return fields;
}
// Script templates carry no attribute templates.
// NOTE(review): returns null rather than an empty list — confirm callers
// null-check before changing this.
@Override
public List<CmsAttributeTemplate> getAttributeTemplates() {
return null;
}
/** Unsupported for script templates. */
@Override
public void addAttributeTemplate(CmsAttributeTemplate template) {
throw new UnsupportedOperationException();
}
/** Enforces a unique templateName per webSiteId before create/update. */
@Override
protected void verifyNewFields(Delegator delegator, Map<String, Object> fields, boolean isNew) throws CmsException {
// NOTE: in theory, could want groupingNullSignificant=false here because it makes the duplicate name check more aggressive...
verifyUniqueName(delegator, fields, isNew, "templateName", false, "webSiteId", true, true);
}
public void setWebSiteId(String webSiteId) { // legacy webSiteId field
entity.setString("webSiteId", webSiteId);
}
public String getWebSiteId() { // legacy webSiteId field
return entity.getString("webSiteId");
}
public String getScriptLang() { // 2016: new
return entity.getString("scriptLang");
}
/**
 * Resolves the effective script language via the executor; logs and returns
 * null on any failure.
 */
public String getResolvedScriptLang() {
try {
ScriptExecutor executor = this.getExecutor();
return executor.getScriptLang().getName();
} catch (Exception e) {
Debug.logError(e, "Cms: Error determining script language for script '" + getId() + "'", module);
return null;
}
}
public Boolean getStandaloneBoolean() { // 2016: new
return entity.getBoolean("standalone");
}
/** A null column value is treated as STANDALONE_DEFAULT. */
public boolean isStandalone() {
Boolean standalone = getStandaloneBoolean();
return standalone != null ? standalone : STANDALONE_DEFAULT;
}
public static EntityCondition getStandaloneCond() {
return standaloneCond;
}
public static EntityCondition getNotStandaloneCond() {
return notStandaloneCond;
}
/**
 * True when no page-template, asset-template or page script association
 * record references this script template.
 */
public boolean isOrphan() {
    final String[] assocEntities = {"CmsPageTemplateScriptAssoc",
            "CmsAssetTemplateScriptAssoc", "CmsPageScriptAssoc"};
    try {
        // Same lookup order as before; stop at the first reference found.
        for (String entityName : assocEntities) {
            if (!UtilValidate.isEmpty(getDelegator().findByAnd(entityName,
                    UtilMisc.toMap("scriptTemplateId", getId()), null, false))) {
                return false;
            }
        }
        return true;
    } catch (GenericEntityException e) {
        throw new CmsDataException(e);
    }
}
/**
 * Returns a human-readable identifier for the script body: its location,
 * "[stored body]" or "[invalid]", suffixed with "#invokeName" when an
 * invoke name is set on the association.
 */
public String getQualifiedName() { // 2016: new
TemplateBodySource tmplBodySrc = getTemplateBodySource();
String location = tmplBodySrc.getLocation();
String invokeName = getAssoc() != null ? getAssoc().getInvokeName() : null;
String qualName;
if (UtilValidate.isNotEmpty(location)) {
qualName = location;
} else if (tmplBodySrc.getStoredBody() != null) {
qualName = "[stored body]";
} else {
// Should not happen: a template must have either a location or a stored body.
Debug.logError("Cms: Detected invalid template body source for script template " + getId() +
"; neither location nor stored body exists", module);
qualName = "[invalid]";
}
if (UtilValidate.isNotEmpty(invokeName)) {
qualName += "#" + invokeName;
}
return qualName;
}
/**
 * Removes this script template together with every association record that
 * references it, plus its template body content.
 *
 * @return total number of rows affected
 */
@Override
public int remove() {
    int rowsAffected = 0;
    Delegator delegator = getDelegator();
    try {
        // Drop all association entities pointing at this script, in the same
        // order as before: page-template, asset-template, then page assocs.
        for (String relationName : new String[] {"CmsPageTemplateScriptAssoc",
                "CmsAssetTemplateScriptAssoc", "CmsPageScriptAssoc"}) {
            for (GenericValue assocValue : entity.getRelated(relationName, null, null, false)) {
                assocValue.remove();
                rowsAffected += 1;
            }
        }
    } catch (GenericEntityException e) {
        throw makeRemoveException(e);
    }
    // Capture the content id before the entity itself is removed.
    String contentId = getTemplateContentId();
    return rowsAffected + super.remove() + removeTemplateBodySourceCommon(delegator, contentId);
}
/**
 * Deletes this script only when it is dependent (non-standalone) and no
 * longer referenced by any association.
 *
 * @return number of rows removed (0 when kept)
 */
public int removeIfOrphan() {
    if (isStandalone() || !isOrphan()) {
        return 0;
    }
    Debug.logInfo("Cms: Deleting script '" + getId() + "' because it is marked as non-standalone (dependent)"
            + " and has become orphaned", module);
    return super.remove();
}
/**
 * Returns the (lazily created) script executor for this template. On any
 * failure a dummy executor is cached instead, so the error is logged once
 * and rendering can proceed.
 * NOTE(review): the lazy init is not synchronized; this appears to rely on
 * preload() resolving the executor before the instance is shared across
 * threads — confirm before using unshared/unpreloaded instances concurrently.
 */
public ScriptExecutor getExecutor() {
ScriptExecutor executor = this.executor;
if (executor == null) {
try {
executor = ScriptExecutor.getExecutor(getScriptLang(), getId(),
getTemplateBodySource(), getAssoc() != null ? getAssoc().getInvokeName() : null);
if (executor == null) {
throw new CmsException("Invalid script template");
}
} catch(Throwable t) {
Debug.logError(t, "Cms: Invalid script template: " + getScriptLogRepr(), module);
executor = ScriptExecutor.getDummyExecutor();
}
this.executor = executor;
}
return executor;
}
/** Returns the shared content-processor script executor (never null). */
public static ScriptExecutor getProcessorScriptExecutor() {
    return processorScriptExecutor;
}
/**
 * Builds the content-processor executor from the configured location,
 * falling back to the dummy executor when the location is unset or the
 * script cannot be loaded.
 */
private static ScriptExecutor makeProcessorExecutor() {
    ScriptExecutor executor = null;
    if (UtilValidate.isNotEmpty(processorLocation)) {
        try {
            executor = ScriptExecutor.getExecutor("PROCESSOR", TemplateBodySource.fromLocation(processorLocation), null);
        } catch(Throwable t) {
            // Bug fix: pass the throwable to the logger — it was previously
            // dropped, hiding the root cause of the failure.
            Debug.logError(t, "Cms: Invalid process script executor for location: " + processorLocation, module);
        }
    }
    return executor != null ? executor : ScriptExecutor.getDummyExecutor();
}
/** Returns the configured content-processor script location (may be empty). */
public static String getProcessorLocation() {
    return processorLocation;
}
/**
 * Short log-friendly description: "[id: X, location: Y]" when a body
 * location is set, otherwise just "[id: X]".
 */
public String getScriptLogRepr() {
    final String location = getTemplateBodySource().getLocation();
    if (UtilValidate.isEmpty(location)) {
        return "[id: " + getId() + "]";
    }
    return "[id: " + getId() + ", location: " + location + "]";
}
/** Returns the association backreference; null when loaded standalone. */
public CmsScriptTemplateAssoc getAssoc() {
return assoc;
}
// NOTE(review): the three delegating getters below throw NullPointerException
// when no assoc is set (unlike getQualifiedName, which null-checks) — confirm
// callers only invoke them in association contexts.
public Long getInputPosition() {
return getAssoc().getInputPosition();
}
public String getInvokeName() {
return getAssoc().getInvokeName();
}
public String getAssocId() {
return getAssoc().getAssocId();
}
/**
 * Association record linking a {@link CmsScriptTemplate} to the entity that
 * uses it; concrete subclasses map the specific association entity and its
 * extra ID fields.
 */
public static abstract class CmsScriptTemplateAssoc extends CmsDataObject {
    private static final long serialVersionUID = -4166919325339588209L;
    // Lazily-resolved back-reference to the script template; see getScriptTemplate().
    protected CmsScriptTemplate scriptTemplate;
    protected CmsScriptTemplateAssoc(GenericValue entity) {
        super(entity);
    }
    public CmsScriptTemplateAssoc(Delegator delegator, Map<String, ?> fields, CmsScriptTemplate scriptTemplate) {
        super(delegator, fields);
        this.scriptTemplate = scriptTemplate;
    }
    /** Copy constructor used by copy operations. */
    protected CmsScriptTemplateAssoc(CmsScriptTemplateAssoc other, Map<String, Object> copyArgs) {
        super(other, copyArgs);
        // NOTE: don't bother clearing out the ID fields here, caller should handle
    }
    /**
     * Applies field updates, silently dropping any attempt to change
     * scriptTemplateId once it has been set.
     */
    @Override
    public void update(Map<String, ?> fields, boolean setIfEmpty) {
        // here, must ignore scriptTemplateId - set at creation and should never change
        if (fields.containsKey("scriptTemplateId") && UtilValidate.isNotEmpty(getScriptTemplateId())) {
            // Copy before mutating: the caller's map may be read-only or shared.
            fields = new HashMap<>(fields);
            fields.remove("scriptTemplateId");
        }
        super.update(fields, setIfEmpty);
    }
    @Override
    public abstract CmsScriptTemplateAssoc copy(Map<String, Object> copyArgs);
    /**
     * 2016: Loads ALL this object's content into the current instance.
     * <p>
     * WARN: IMPORTANT: AFTER THIS CALL,
     * NO FURTHER CALLS ARE ALLOWED TO MODIFY THE INSTANCE IN MEMORY.
     * Essential for thread safety!!!
     */
    @Override
    public void preload(PreloadWorker preloadWorker) {
        super.preload(preloadWorker);
        // DO NOT do this way because the preload will (un-intuitively) come from CmsScriptTemplate to us
        //preloadWorker.preload(getScriptTemplate());
    }
    @Override
    public abstract ScriptTemplateAssocWorker<? extends CmsScriptTemplateAssoc> getWorkerInst();
    /**
     * Stores the linked script template first (so its ID exists), adopts its ID
     * if ours is empty, then stores this association record. Throws if a
     * different scriptTemplateId would be assigned to an existing association.
     */
    @Override
    public void store() throws CmsException {
        if (scriptTemplate != null) {
            scriptTemplate.store();
            if (UtilValidate.isEmpty(getScriptTemplateId())) {
                setScriptTemplateId(scriptTemplate.getId());
            } else {
                if (!getScriptTemplateId().equals(scriptTemplate.getId())) {
                    throw new CmsException("Error: trying to change the scriptTemplateId of "
                            + "existing script association '" + getId() + "'; cannot be changed once after creation");
                }
            }
        }
        super.store();
    }
    public String getAssocId() {
        return getId();
    }
    public String getScriptTemplateId() {
        return entity.getString("scriptTemplateId");
    }
    // Private on purpose: scriptTemplateId is only assigned internally by store().
    private void setScriptTemplateId(String scriptTemplateId) {
        entity.setString("scriptTemplateId", scriptTemplateId);
    }
    /**
     * Returns the linked script template, loading it on first access via a
     * one-off worker that passes this association into the template constructor.
     */
    public CmsScriptTemplate getScriptTemplate() {
        CmsScriptTemplate scriptTemplate = this.scriptTemplate;
        final CmsScriptTemplateAssoc assoc = this;
        if (scriptTemplate == null) {
            scriptTemplate = new ScriptTemplateWorker() { // extend the script template worker to pass the assoc to CmsScriptTemplate constructor
                @Override
                public CmsScriptTemplate makeFromValue(GenericValue value) throws CmsException {
                    return new CmsScriptTemplate(value, assoc);
                }
                @Override
                public CmsScriptTemplate makeFromFields(Delegator delegator, Map<String, ?> fields)
                        throws CmsException {
                    return new CmsScriptTemplate(delegator, fields, assoc);
                }
            }.findByIdAlways(getDelegator(), getScriptTemplateId(), false);
            this.scriptTemplate = scriptTemplate;
        }
        return scriptTemplate;
    }
    /**
     * Temporary clearing method during copy operations, SHOULD be
     * overridden by child classes to clear the other IDs.
     */
    protected abstract void clearTemplate();
    protected abstract void setTemplate(CmsDataObject template);
    protected abstract boolean hasTemplate();
    /** Returns the stored input position, defaulting to 0 when unset. */
    public Long getInputPosition() {
        Long inputPosition = entity.getLong("inputPosition");
        return inputPosition != null ? inputPosition : 0L;
    }
    public String getInvokeName() {
        return entity.getString("invokeName");
    }
    /** Base worker type for association subclasses. */
    public static abstract class ScriptTemplateAssocWorker<T extends CmsScriptTemplateAssoc> extends DataObjectWorker<T> {
        protected ScriptTemplateAssocWorker(Class<T> dataObjectClass) {
            super(dataObjectClass);
        }
    }
}
/**
 * Script languages the CMS handles explicitly; anything else is delegated to
 * the generic (AUTO) executor.
 */
public enum ScriptLang {
    // NOTE: Not all supported languages are listed here; just the ones we handle explicitly
    GROOVY("groovy"),
    SCREEN_ACTIONS("screen-actions"),
    SIMPLE_METHOD("simple-method"),
    AUTO("auto"),
    NONE("none");
    private static final Set<String> nameSet;
    private static final Map<String, ScriptLang> nameMap;
    static {
        Map<String, ScriptLang> map = new HashMap<>();
        for(ScriptLang scriptLang : ScriptLang.values()) {
            map.put(scriptLang.getName(), scriptLang);
        }
        // FIX: wrap the map unmodifiable for consistency with nameSet below,
        // so neither shared lookup structure can be mutated after class init.
        nameMap = Collections.unmodifiableMap(map);
        nameSet = Collections.unmodifiableSet(new HashSet<>(map.keySet()));
    }
    private final String name;
    private ScriptLang(String name) {
        this.name = name;
    }
    /** Returns the stable string form of this language (e.g. "groovy"). */
    public String getName() {
        return name;
    }
    /** Returns the language for the given name, or null if unknown. */
    public static ScriptLang fromName(String name) {
        return nameMap.get(name);
    }
    /**
     * Infers the script language from a script location by extension,
     * or AUTO when unrecognized; returns null for a null location.
     */
    public static ScriptLang fromLocation(String fullLocation) throws CmsException {
        if (fullLocation != null) {
            String location = WidgetWorker.getScriptLocation(fullLocation);
            if (location.endsWith(".groovy")) {
                return GROOVY;
            } else if (location.endsWith("Screens.xml")) { // WARN: this is a potentially problematic heuristic, but works with all known scipio files
                return SCREEN_ACTIONS;
            } else if (location.endsWith(".xml")) {
                return SIMPLE_METHOD;
            } else {
                return AUTO;
            }
        }
        return null;
    }
    /** Returns the unmodifiable set of all known language names. */
    public static Set<String> getNames() {
        return nameSet;
    }
}
/**
 * Executes a CMS script from either a stored body or a file location, with
 * concrete subclasses per language/source combination. Instances are created
 * via the {@link #getExecutor} factory methods.
 */
@SuppressWarnings("serial")
public static abstract class ScriptExecutor implements Serializable {
    // Shared no-op executor returned for lang NONE and on creation failures.
    private static final DummyScriptExecutor dummyExecutor = new DummyScriptExecutor();
    protected final String id; // script template ID, used for logging
    protected final String invokeName; // optional entry-point name within the script (may be null)
    protected ScriptExecutor(String id, String invokeName) {
        this.id = id;
        this.invokeName = invokeName;
    }
    // Factory methods
    /**
     * NOTE: Explicit script type always has priority. If null, can usually be inferred from location,
     * but currently cannot infer from body alone.
     */
    public static ScriptExecutor getExecutor(String langStr, String id, TemplateBodySource tmplBodySrc, String invokeName) {
        // NOTE: here location has priority, contrary to some other code, because executing
        // from specified location is unambiguous and faster
        String bodyOrLocation = tmplBodySrc.getLocation();
        boolean isLocation = UtilValidate.isNotEmpty(bodyOrLocation);
        if (!isLocation) {
            bodyOrLocation = tmplBodySrc.getAvailableBody();
            if (bodyOrLocation == null) {
                throw new CmsException("Script '" + id + "' has no location or body to execute");
            }
        }
        ScriptLang knownLang;
        if (UtilValidate.isNotEmpty(langStr)) {
            knownLang = ScriptLang.fromName(langStr);
        } else {
            knownLang = ScriptLang.AUTO;
            langStr = null;
        }
        // NOTE: if knownLang==null here, it means langStr is an unknown lang but still explicit
        if (knownLang == ScriptLang.AUTO) {
            if (isLocation) {
                // Try to sharpen AUTO to a concrete language from the file extension.
                knownLang = ScriptLang.fromLocation(tmplBodySrc.getLocation());
                if (knownLang == null) {
                    // double fallback to AutoLocationScriptExecutor
                    knownLang = ScriptLang.AUTO;
                }
            } else {
                if (langStr == null) {
                    throw new CmsException("Unable to determine script type for script body - no explicit type and no location (cannot be inferred from body alone)");
                }
            }
        }
        // Dispatch on resolved language; bodies are only supported for groovy/auto.
        if (knownLang == ScriptLang.GROOVY) {
            if (isLocation) {
                return new GroovyLocationScriptExecutor(id, bodyOrLocation, invokeName);
            } else {
                return new GroovyBodyScriptExecutor(id, bodyOrLocation, invokeName);
            }
        } else if (knownLang == ScriptLang.SCREEN_ACTIONS) {
            if (isLocation) {
                return new ScreenActionsLocationScriptExecutor(id, bodyOrLocation, invokeName);
            } else {
                throw new CmsException("Screen actions script cannot be run from stored body (script id: '" + id + "')");
            }
        } else if (knownLang == ScriptLang.SIMPLE_METHOD) {
            if (isLocation) {
                return new SimpleMethodLocationScriptExecutor(id, bodyOrLocation, invokeName);
            } else {
                throw new CmsException("Simple-method script cannot be run from stored body (script id: '" + id + "')");
            }
        } else if (knownLang == ScriptLang.NONE) {
            return dummyExecutor;
        } else {
            // Unknown or AUTO language: let the generic executors figure it out.
            if (isLocation) {
                return new AutoLocationScriptExecutor(id, bodyOrLocation, invokeName, langStr);
            } else {
                return new AutoBodyScriptExecutor(id, bodyOrLocation, invokeName, langStr);
            }
        }
    }
    /** Convenience overload that uses AUTO language detection. */
    public static ScriptExecutor getExecutor(String id, TemplateBodySource tmplBodySrc, String invokeName) {
        return getExecutor(ScriptLang.AUTO.getName(), id, tmplBodySrc, invokeName);
    }
    /** Returns the shared no-op executor (executes nothing, returns null). */
    public static ScriptExecutor getDummyExecutor() {
        return dummyExecutor;
    }
    // Public methods
    public abstract Object execute(Map<String, Object> context) throws Exception;
    /**
     * Executes the script, logging (not propagating) any error; returns null
     * on failure.
     */
    public Object executeSafe(Map<String, Object> context) {
        try {
            return execute(context);
        } catch (Throwable t) {
            Debug.logError(t, "Cms: Error running script " + getScriptLogRepr() + ": " + t.getMessage() + getReqLogIdDelimStr(context), module);
            return null;
        }
    }
    public abstract ScriptLang getScriptLang();
    public abstract String getScriptLogRepr();
    // Internal implementation methods
    protected abstract Object run(Map<String, Object> context) throws Exception;
    // Builds the per-request log suffix from the "request" entry in the context.
    protected String getReqLogIdDelimStr(Map<String, Object> context) {
        return CmsControlUtil.getReqLogIdDelimStr((HttpServletRequest) context.get("request"));
    }
    // Location and Body abstract classes
    /** Base for executors that run a script stored at a file/component location. */
    public static abstract class LocationScriptExecutor extends ScriptExecutor {
        protected final String location;
        protected LocationScriptExecutor(String id, String location, String invokeName) {
            super(id, invokeName);
            this.location = location;
        }
        @Override
        public Object execute(Map<String, Object> context) throws Exception {
            if (CmsUtil.verboseOn()) {
                Debug.logInfo("Cms: Running script from location: " + location + (invokeName != null ? "#" + invokeName : "")
                        + (id != null ? " (id: " + id + ")" : "") + getReqLogIdDelimStr(context), module);
            }
            return run(context);
        }
        @Override
        public String getScriptLogRepr() {
            return "[id: " + id + ", location: " + location + (invokeName != null ? "#" + invokeName : "") + "]";
        }
    }
    /** Base for executors that run a script from an inline stored body. */
    public static abstract class BodyScriptExecutor extends ScriptExecutor {
        protected final String body;
        public BodyScriptExecutor(String id, String body, String invokeName) {
            super(id, invokeName);
            this.body = body;
        }
        @Override
        public Object execute(Map<String, Object> context) throws Exception {
            if (CmsUtil.verboseOn()) {
                Debug.logInfo("Cms: Running script from body" + (id != null ? " (id: " + id + ")" : "") + getReqLogIdDelimStr(context), module);
            }
            return run(context);
        }
        @Override
        public String getScriptLogRepr() {
            return "[id: " + id + "]";
        }
    }
    // Language implementations
    /** Runs a Groovy script from a location via GroovyUtil. */
    public static class GroovyLocationScriptExecutor extends LocationScriptExecutor {
        protected GroovyLocationScriptExecutor(String id, String location, String invokeName) {
            super(id, location, invokeName);
        }
        @Override
        protected Object run(Map<String, Object> context) throws Exception {
            return GroovyUtil.runScriptAtLocation(location, invokeName, context);
        }
        @Override
        public ScriptLang getScriptLang() {
            return ScriptLang.GROOVY;
        }
    }
    /** Runs a Groovy script from an inline body (delegates to the auto body executor with lang fixed). */
    public static class GroovyBodyScriptExecutor extends AutoBodyScriptExecutor {
        protected GroovyBodyScriptExecutor(String id, String body, String invokeName) {
            super(id, body, invokeName, "groovy");
        }
        @Override
        public ScriptLang getScriptLang() {
            return ScriptLang.GROOVY;
        }
    }
    /** Runs a minilang simple-method from a location. */
    public static class SimpleMethodLocationScriptExecutor extends LocationScriptExecutor {
        protected SimpleMethodLocationScriptExecutor(String id, String location, String invokeName) {
            super(id, location, invokeName);
        }
        @Override
        protected Object run(Map<String, Object> context) throws Exception {
            String method = invokeName;
            // Based on org.ofbiz.widget.model.AbstractModelAction.Script.runAction
            Map<String, Object> localContext = new HashMap<>();
            localContext.putAll(context);
            DispatchContext ctx = WidgetWorker.getDispatcher(context).getDispatchContext();
            MethodContext methodContext = new MethodContext(ctx, localContext, null);
            String result = SimpleMethod.runSimpleMethod(location, method, methodContext);
            // Propagate results produced by the simple-method back into the caller's context.
            context.putAll(methodContext.getResults());
            return result;
        }
        @Override
        public ScriptLang getScriptLang() {
            return ScriptLang.SIMPLE_METHOD;
        }
    }
    /** Runs the actions section of a screen widget definition. */
    public static class ScreenActionsLocationScriptExecutor extends LocationScriptExecutor {
        protected ScreenActionsLocationScriptExecutor(String id, String location, String invokeName) {
            super(id, location, invokeName);
        }
        @Override
        protected Object run(Map<String, Object> context) throws Exception {
            ModelScreen widget = ScreenFactory.getScreenFromLocation(location, invokeName);
            AbstractModelAction.runSubActions(widget.getSection().getActions(), context); // NOTE: wraps in RunTimeExceptions
            return null;
        }
        @Override
        public ScriptLang getScriptLang() {
            return ScriptLang.SCREEN_ACTIONS;
        }
    }
    /** Runs a location script letting the framework detect the language. */
    public static class AutoLocationScriptExecutor extends LocationScriptExecutor {
        // NOTE: we are simply ignoring the explicit lang here, ofbiz helpers won't let us override
        //protected final String lang;
        protected AutoLocationScriptExecutor(String id, String location, String invokeName, String lang) {
            super(id, location, invokeName);
            // this.lang = lang; // NOTE: ignoring for now
        }
        @Override
        protected Object run(Map<String, Object> context) throws Exception {
            return ScriptUtil.executeScript(location, invokeName, context);
        }
        @Override
        public ScriptLang getScriptLang() {
            return ScriptLang.AUTO;
        }
    }
    /** Runs an inline body via a Scriptlet, with the lang encoded as a "lang:" prefix. */
    public static class AutoBodyScriptExecutor extends BodyScriptExecutor {
        protected final Scriptlet scriptlet;
        public AutoBodyScriptExecutor(String id, String body, String invokeName, String lang) {
            super(id, body, invokeName);
            this.scriptlet = new Scriptlet(lang + ":" + body);
        }
        @Override
        protected Object run(Map<String, Object> context) throws Exception {
            return scriptlet.executeScript(context);
        }
        @Override
        public ScriptLang getScriptLang() {
            return ScriptLang.AUTO;
        }
    }
    /** No-op executor: executes nothing and returns null. */
    public static class DummyScriptExecutor extends ScriptExecutor {
        private DummyScriptExecutor() {
            super("NONE", null);
        }
        @Override
        public ScriptLang getScriptLang() {
            return ScriptLang.NONE;
        }
        @Override
        public Object execute(Map<String, Object> context) throws Exception {
            return null;
        }
        @Override
        public String getScriptLogRepr() {
            return "";
        }
        @Override
        protected Object run(Map<String, Object> context) throws Exception {
            return null;
        }
    }
}
/** Returns the singleton worker for this data object type. */
@Override
public ScriptTemplateWorker getWorkerInst() {
    return ScriptTemplateWorker.worker;
}
/** Static accessor for the singleton script template worker. */
public static ScriptTemplateWorker getWorker() {
    return ScriptTemplateWorker.worker;
}
/**
 * Factory/lookup worker for {@link CmsScriptTemplate}, with optional global
 * object caching by ID and by name. Note that lookup misses are cached too
 * (a null value is stored under the key), so repeated misses skip the DB.
 */
public static class ScriptTemplateWorker extends DataObjectWorker<CmsScriptTemplate> {
    private static final ScriptTemplateWorker worker = new ScriptTemplateWorker();
    protected ScriptTemplateWorker() {
        super(CmsScriptTemplate.class);
    }
    @Override
    public CmsScriptTemplate makeFromValue(GenericValue value) throws CmsException {
        return new CmsScriptTemplate(value);
    }
    @Override
    public CmsScriptTemplate makeFromFields(Delegator delegator, Map<String, ?> fields) throws CmsException {
        return new CmsScriptTemplate(delegator, fields);
    }
    @Override
    public CmsScriptTemplate findById(Delegator delegator, String id, boolean useCache) throws CmsException {
        return findById(delegator, id, useCache, null);
    }
    /**
     * Finds a script template by ID, consulting the global object cache when
     * enabled; the optional request is used only for log correlation.
     */
    public CmsScriptTemplate findById(Delegator delegator, String id, boolean useCache, HttpServletRequest request) throws CmsException {
        boolean useGlobalCache = isUseGlobalObjCacheStatic(useCache);
        CmsObjectCache<CmsScriptTemplate> cache = null;
        if (useGlobalCache) {
            cache = idCache;
        }
        // Key includes the delegator name so multi-tenant delegators don't collide.
        String key = delegator.getDelegatorName() + "::" + id;
        CmsScriptTemplate script = null;
        CacheEntry<CmsScriptTemplate> scriptEntry = null;
        if (useGlobalCache) {
            scriptEntry = cache.getEntry(key);
        }
        if (scriptEntry == null) {
            if (CmsUtil.verboseOn()) {
                Debug.logInfo("Cms: Retrieving script template from database: id: " + id + CmsControlUtil.getReqLogIdDelimStr(request), module);
            }
            script = findOne(delegator, UtilMisc.toMap("scriptTemplateId", id),
                    isUseDbCacheBehindObjCacheStatic(useCache, useGlobalCache));
            if (useGlobalCache) {
                // Cache even when null, so misses are remembered (negative caching).
                cache.put(key, script);
            }
        } else {
            if (scriptEntry.hasValue()) {
                if (CmsUtil.verboseOn()) {
                    Debug.logVerbose("Cms: Retrieving script template from cache: id: " + id + CmsControlUtil.getReqLogIdDelimStr(request), module);
                }
                script = scriptEntry.getValue();
            }
        }
        return script;
    }
    public CmsScriptTemplate findByName(Delegator delegator, String name, String webSiteId, boolean webSiteIdOptional, boolean useCache) throws CmsException {
        return findByName(delegator, name, webSiteId, webSiteIdOptional, useCache, null);
    }
    /**
     * Finds by name and optional webSiteId.
     * NOTE: if no webSiteId passed, it preferentially returns the records having no webSiteId.
     * NOTE: 2019-01-25: webSiteId matching is now honored (was previously ignored).
     */
    public CmsScriptTemplate findByName(Delegator delegator, String name, String webSiteId, boolean webSiteIdOptional, boolean useCache, HttpServletRequest request) throws CmsException {
        boolean useGlobalCache = isUseGlobalObjCacheStatic(useCache);
        CmsObjectCache<CmsScriptTemplate> cache = null;
        if (useGlobalCache) {
            cache = nameCache;
        }
        // Normalize empty webSiteId to null so cache keys and DB conditions agree.
        if (webSiteId != null && webSiteId.isEmpty()) {
            webSiteId = null;
        }
        String key = delegator.getDelegatorName() + "::" + name + "::" + (webSiteId != null ? webSiteId : (webSiteIdOptional ? "_OPT_" : ""));
        CmsScriptTemplate script = null;
        CacheEntry<CmsScriptTemplate> scriptEntry = null;
        if (useGlobalCache) {
            scriptEntry = cache.getEntry(key);
        }
        if (scriptEntry == null) {
            if (CmsUtil.verboseOn()) {
                Debug.logInfo("Cms: Retrieving script template from database: name: " + name + CmsControlUtil.getReqLogIdDelimStr(request), module);
            }
            Map<String, Object> fields = UtilMisc.toMap("templateName", name);
            if (!webSiteIdOptional || webSiteId != null) {
                fields.put("webSiteId", webSiteId);
            }
            // NOTE: always null webSiteIds first - this matters
            List<CmsScriptTemplate> scripts = findAll(delegator, fields, UtilMisc.toList("webSiteId"), isUseDbCacheBehindObjCacheStatic(useCache, useGlobalCache));
            if (scripts.size() > 0) {
                script = scripts.get(0);
            }
            if (scripts.size() > 1) {
                // Duplicates: warn/error, but still return the first match (null webSiteId sorts first).
                if (!webSiteIdOptional || webSiteId != null) {
                    Debug.logError("Cms: Multiple script templates with name '" + name + "' and webSiteId '" + webSiteId + "' found; using first found (id: " + script.getId() + ")", module);
                } else if (script.getWebSiteId() != null) {
                    // if lookup by name only, it's usually because we expected only one result,
                    // either one with webSiteId null (no log warning) or only one webSiteId
                    Debug.logWarning("Cms: Multiple script templates with name '" + name + "' and having a webSiteId found; using first found (id: " + script.getId() + ", webSiteId: " + script.getWebSiteId() + ")", module);
                }
            }
            if (useGlobalCache) {
                cache.put(key, script);
            }
        } else {
            if (scriptEntry.hasValue()) {
                if (CmsUtil.verboseOn()) {
                    Debug.logVerbose("Cms: Retrieving script template from cache: name: " + name + CmsControlUtil.getReqLogIdDelimStr(request), module);
                }
                script = scriptEntry.getValue();
            }
        }
        return script;
    }
}
/** Visitor hook: walks this template's related entities per the static visit plan. */
@Override
public void acceptEntityDepsVisitor(CmsEntityVisitor visitor, GenericValue relValue, VisitRelation relValueRelation, CmsMajorObject majorDataObj) throws Exception {
    CmsEntityVisit.acceptRelatedEntityDepsVisitor(visitor, VisitRelPlan.visitRelations, this.getEntity(), relValueRelation, relValue, this);
}
/**
 * Entity-visit plan for CmsScriptTemplate: visits only the template entity
 * itself (no related-entity traversal).
 */
public static class VisitRelPlan extends VisitRelations.BuildPlan {
    public static final VisitRelPlan INSTANCE = new VisitRelPlan("CmsScriptTemplate");
    static final VisitRelations visitRelations = INSTANCE.buildSafe();
    public VisitRelPlan(String majorEntityName) { super(majorEntityName); }
    @Override public VisitRelations.Builder planDefinition(Delegator delegator) throws Exception {
        return newBuilder(delegator)
                .entity("CmsScriptTemplate")
                .self();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql;
import org.apache.calcite.plan.Context;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.type.SqlOperandTypeChecker;
import org.apache.calcite.sql.type.SqlOperandTypeInference;
import org.apache.calcite.sql.type.SqlReturnTypeInference;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.calcite.util.Optionality;
import java.util.List;
import java.util.Objects;
import javax.annotation.Nonnull;
/**
 * Abstract base class for the definition of an aggregate function: an operator
 * which aggregates sets of values into a result.
 */
public abstract class SqlAggFunction extends SqlFunction implements Context {
  private final boolean requiresOrder;
  private final boolean requiresOver;
  private final Optionality requiresGroupOrder;

  //~ Constructors -----------------------------------------------------------

  /** Creates a built-in SqlAggFunction. */
  @Deprecated // to be removed before 2.0
  protected SqlAggFunction(
      String name,
      SqlKind kind,
      SqlReturnTypeInference returnTypeInference,
      SqlOperandTypeInference operandTypeInference,
      SqlOperandTypeChecker operandTypeChecker,
      SqlFunctionCategory funcType) {
    // We leave sqlIdentifier as null to indicate that this is a builtin.
    this(name, null, kind, returnTypeInference, operandTypeInference,
        operandTypeChecker, funcType, false, false,
        Optionality.FORBIDDEN);
  }

  /** Creates a user-defined SqlAggFunction. */
  @Deprecated // to be removed before 2.0
  protected SqlAggFunction(
      String name,
      SqlIdentifier sqlIdentifier,
      SqlKind kind,
      SqlReturnTypeInference returnTypeInference,
      SqlOperandTypeInference operandTypeInference,
      SqlOperandTypeChecker operandTypeChecker,
      SqlFunctionCategory funcType) {
    this(name, sqlIdentifier, kind, returnTypeInference, operandTypeInference,
        operandTypeChecker, funcType, false, false,
        Optionality.FORBIDDEN);
  }

  /** Creates an SqlAggFunction without a group-order specification
   * (defaults to {@link Optionality#FORBIDDEN}). */
  @Deprecated // to be removed before 2.0
  protected SqlAggFunction(
      String name,
      SqlIdentifier sqlIdentifier,
      SqlKind kind,
      SqlReturnTypeInference returnTypeInference,
      SqlOperandTypeInference operandTypeInference,
      SqlOperandTypeChecker operandTypeChecker,
      SqlFunctionCategory funcType,
      boolean requiresOrder,
      boolean requiresOver) {
    this(name, sqlIdentifier, kind, returnTypeInference, operandTypeInference,
        operandTypeChecker, funcType, requiresOrder, requiresOver,
        Optionality.FORBIDDEN);
  }

  /** Creates a built-in or user-defined SqlAggFunction or window function.
   *
   * <p>A user-defined function will have a value for {@code sqlIdentifier}; for
   * a built-in function it will be null. */
  protected SqlAggFunction(
      String name,
      SqlIdentifier sqlIdentifier,
      SqlKind kind,
      SqlReturnTypeInference returnTypeInference,
      SqlOperandTypeInference operandTypeInference,
      SqlOperandTypeChecker operandTypeChecker,
      SqlFunctionCategory funcType,
      boolean requiresOrder,
      boolean requiresOver,
      Optionality requiresGroupOrder) {
    super(name, sqlIdentifier, kind, returnTypeInference, operandTypeInference,
        operandTypeChecker, null, funcType);
    this.requiresOrder = requiresOrder;
    this.requiresOver = requiresOver;
    this.requiresGroupOrder = Objects.requireNonNull(requiresGroupOrder);
  }

  //~ Methods ----------------------------------------------------------------

  /** Implements {@link Context}: returns this function cast to the requested
   * class, or null if this function is not an instance of it. */
  public <T> T unwrap(Class<T> clazz) {
    return clazz.isInstance(this) ? clazz.cast(this) : null;
  }

  /** Always true: every SqlAggFunction is an aggregator. */
  @Override public boolean isAggregator() {
    return true;
  }

  /** Aggregate calls may carry a quantifier (e.g. {@code DISTINCT}). */
  @Override public boolean isQuantifierAllowed() {
    return true;
  }

  /** Validates the call, then additionally validates its aggregate
   * parameters against the scope. */
  @Override public void validateCall(
      SqlCall call,
      SqlValidator validator,
      SqlValidatorScope scope,
      SqlValidatorScope operandScope) {
    super.validateCall(call, validator, scope, operandScope);
    validator.validateAggregateParams(call, null, null, scope);
  }

  /** Returns the {@code requiresOrder} flag supplied at construction. */
  @Override public final boolean requiresOrder() {
    return requiresOrder;
  }

  /** Returns whether this aggregate function must, may, or must not contain a
   * {@code WITHIN GROUP (ORDER ...)} clause.
   *
   * <p>Cases:<ul>
   *
   * <li>If {@link Optionality#MANDATORY},
   * then {@code AGG(x) WITHIN GROUP (ORDER BY 1)} is valid,
   * and {@code AGG(x)} is invalid.
   *
   * <li>If {@link Optionality#OPTIONAL},
   * then {@code AGG(x) WITHIN GROUP (ORDER BY 1)}
   * and {@code AGG(x)} are both valid.
   *
   * <li>If {@link Optionality#IGNORED},
   * then {@code AGG(x)} is valid,
   * and {@code AGG(x) WITHIN GROUP (ORDER BY 1)} is valid but is
   * treated the same as {@code AGG(x)}.
   *
   * <li>If {@link Optionality#FORBIDDEN},
   * then {@code AGG(x) WITHIN GROUP (ORDER BY 1)} is invalid,
   * and {@code AGG(x)} is valid.
   * </ul>
   */
  public @Nonnull Optionality requiresGroupOrder() {
    return requiresGroupOrder;
  }

  /** Returns the {@code requiresOver} flag supplied at construction. */
  @Override public final boolean requiresOver() {
    return requiresOver;
  }

  /** Returns whether this aggregate function allows the {@code DISTINCT}
   * keyword.
   *
   * <p>The default implementation returns {@link Optionality#OPTIONAL},
   * which is appropriate for most aggregate functions, including {@code SUM}
   * and {@code COUNT}.
   *
   * <p>Some aggregate functions, for example {@code MIN}, produce the same
   * result with or without {@code DISTINCT}, and therefore return
   * {@link Optionality#IGNORED} to indicate this. For such functions,
   * Calcite will probably remove {@code DISTINCT} while optimizing the query.
   */
  public @Nonnull Optionality getDistinctOptionality() {
    return Optionality.OPTIONAL;
  }

  @Deprecated // to be removed before 2.0
  public List<RelDataType> getParameterTypes(RelDataTypeFactory typeFactory) {
    throw new UnsupportedOperationException("remove before calcite-2.0");
  }

  @Deprecated // to be removed before 2.0
  public RelDataType getReturnType(RelDataTypeFactory typeFactory) {
    throw new UnsupportedOperationException("remove before calcite-2.0");
  }

  /** Whether this aggregate function allows a {@code FILTER (WHERE ...)}
   * clause. */
  public boolean allowsFilter() {
    return true;
  }

  /** Returns whether this aggregate function allows specifying null treatment
   * ({@code RESPECT NULLS} or {@code IGNORE NULLS}). */
  public boolean allowsNullTreatment() {
    return false;
  }
}
// End SqlAggFunction.java
| |
/*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service.materials;
import com.thoughtworks.go.config.GoConfigDao;
import com.thoughtworks.go.config.UpdateConfigCommand;
import com.thoughtworks.go.config.exceptions.EntityType;
import com.thoughtworks.go.domain.config.*;
import com.thoughtworks.go.domain.packagerepository.PackageRepositories;
import com.thoughtworks.go.domain.packagerepository.PackageRepository;
import com.thoughtworks.go.helper.ConfigFileFixture;
import com.thoughtworks.go.plugin.infra.PluginManager;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.presentation.TriStateSelection;
import com.thoughtworks.go.server.dao.PluginSqlMapDao;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.util.GoConfigFileHelper;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static com.thoughtworks.go.serverhealth.HealthStateType.forbidden;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:/applicationContext-global.xml",
"classpath:/applicationContext-dataLocalAccess.xml",
"classpath:/testPropertyConfigurer.xml",
"classpath:/spring-all-servlet.xml",
})
public class PackageRepositoryServiceIntegrationTest {
@Autowired
private GoConfigService goConfigService;
@Autowired
private PackageRepositoryService service;
@Autowired
private PluginSqlMapDao pluginSqlMapDao;
@Autowired
private GoConfigDao goConfigDao;
private GoConfigFileHelper configHelper;
@Mock
private PluginManager pluginManager;
private Username username;
/**
 * Builds a config with one password-file auth config, wires the mocked
 * plugin manager into the service, and grants the test user system-admin
 * privileges so authorized operations succeed.
 */
@Before
public void setUp() throws Exception {
    initMocks(this);
    String content = ConfigFileFixture.configWithSecurity("<security>\n" +
            " <authConfigs>\n" +
            " <authConfig id=\"9cad79b0-4d9e-4a62-829c-eb4d9488062f\" pluginId=\"cd.go.authentication.passwordfile\">\n" +
            " <property>\n" +
            " <key>PasswordFilePath</key>\n" +
            " <value>../manual-testing/ant_hg/password.properties</value>\n" +
            " </property>\n" +
            " </authConfig>\n" +
            " </authConfigs>" +
            "</security>");
    configHelper = new GoConfigFileHelper(content);
    configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
    configHelper.onSetUp();
    goConfigService.forceNotifyListeners();
    // Replace the real plugin manager with the Mockito mock declared above.
    service.setPluginManager(pluginManager);
    username = new Username("CurrentUser");
    // Make "CurrentUser" a system admin so deleteRepository/createPackageRepository are authorized.
    UpdateConfigCommand command = goConfigService.modifyAdminPrivilegesCommand(asList(username.getUsername().toString()), new TriStateSelection(Admin.GO_SYSTEM_ADMIN, TriStateSelection.Action.add));
    goConfigService.updateConfig(command);
}
/**
 * Restores the config file, clears persisted plugins, and empties the
 * package repositories so tests do not leak state into each other.
 */
@After
public void tearDown() throws Exception {
    configHelper.onTearDown();
    pluginSqlMapDao.deleteAllPlugins();
    goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories());
}
@Test
public void shouldDeleteTheSpecifiedPackageRepository() throws Exception {
    // Seed the config with a single "npm" repository.
    String repositoryId = "npm";
    PackageRepository repository = new PackageRepository();
    repository.setId(repositoryId);
    goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(repository));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repositoryId), is(repository));
    // Delete as the admin user and expect the standard success message.
    HttpLocalizedOperationResult actualResult = new HttpLocalizedOperationResult();
    service.deleteRepository(username, repository, actualResult);
    HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
    expectedResult.setMessage(EntityType.PackageRepository.deleteSuccessful(repository.getId()));
    assertThat(actualResult, is(expectedResult));
    // The repository must be gone from the config afterwards.
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(0));
    assertNull(goConfigService.getConfigForEditing().getPackageRepositories().find(repositoryId));
}
@Test
public void shouldReturnTheExactLocalizeMessageIfItFailsToDeletePackageRepository() throws Exception {
    // Seed the config with a single "npm" repository.
    String repositoryId = "npm";
    PackageRepository repository = new PackageRepository();
    repository.setId(repositoryId);
    goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(repository));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repositoryId), is(repository));
    // Attempt the delete as a non-admin user; expect the forbidden result.
    HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
    expectedResult.forbidden(EntityType.PackageRepository.forbiddenToDelete("npm", "UnauthorizedUser"), forbidden());
    HttpLocalizedOperationResult actualResult = new HttpLocalizedOperationResult();
    service.deleteRepository(new Username("UnauthorizedUser"), repository, actualResult);
    assertThat(actualResult, is(expectedResult));
    // The repository must remain untouched after the forbidden attempt.
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repositoryId), is(repository));
}
@Test
public void shouldReturnTheExactLocalizeMessageIfItFailsToCreatePackageRepository() throws Exception {
    // Seed the editable config with a single "npm" repository.
    String repositoryId = "npm";
    PackageRepository repository = new PackageRepository();
    repository.setId(repositoryId);
    goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(repository));

    // An unauthorized user attempting the create should get the exact forbidden message.
    HttpLocalizedOperationResult expected = new HttpLocalizedOperationResult();
    expected.forbidden(EntityType.PackageRepository.forbiddenToEdit("npm", "UnauthorizedUser"), forbidden());

    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repositoryId), is(repository));

    HttpLocalizedOperationResult actual = new HttpLocalizedOperationResult();
    service.createPackageRepository(repository, new Username("UnauthorizedUser"), actual);

    // The create must be rejected and the existing config left untouched.
    assertThat(actual, is(expected));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repositoryId), is(repository));
}
@Test
public void shouldReturnTheExactLocalizeMessageIfItFailsToUpdatePackageRepository() throws Exception {
    // An unauthorized user attempting the rename should get the exact forbidden message.
    HttpLocalizedOperationResult expected = new HttpLocalizedOperationResult();
    expected.forbidden(EntityType.PackageRepository.forbiddenToEdit("npm.org", "UnauthorizedUser"), forbidden());

    String existingRepoId = "npmOrg";
    String renamedRepoId = "npm.org";

    // Both repositories share the same plugin and configuration; only the
    // id/name differ, which is the rename the update attempts.
    PluginConfiguration npmPlugin = new PluginConfiguration();
    npmPlugin.setId("npm");

    PackageRepository existingRepo = new PackageRepository();
    existingRepo.setPluginConfiguration(npmPlugin);
    existingRepo.setId(existingRepoId);
    existingRepo.setName(existingRepoId);

    PackageRepository renamedRepo = new PackageRepository();
    renamedRepo.setPluginConfiguration(npmPlugin);
    renamedRepo.setId(renamedRepoId);
    renamedRepo.setName(renamedRepoId);

    Configuration sharedConfiguration = new Configuration();
    sharedConfiguration.add(new ConfigurationProperty(new ConfigurationKey("foo"), new ConfigurationValue("bar")));
    existingRepo.setConfiguration(sharedConfiguration);
    renamedRepo.setConfiguration(sharedConfiguration);

    when(pluginManager.getPluginDescriptorFor("npm")).thenReturn(GoPluginDescriptor.builder().id("npm").build());
    goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(existingRepo));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(existingRepoId), is(existingRepo));
    assertNull(goConfigService.getConfigForEditing().getPackageRepositories().find(renamedRepoId));

    HttpLocalizedOperationResult actual = new HttpLocalizedOperationResult();
    service.updatePackageRepository(renamedRepo, new Username("UnauthorizedUser"), "md5", actual, existingRepoId);

    // The rename must be rejected and the original repository left in place.
    assertThat(actual, is(expected));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
    assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(existingRepoId), is(existingRepo));
    assertNull(goConfigService.getConfigForEditing().getPackageRepositories().find(renamedRepoId));
}
}
| |
package org.moparscape.elysium.util;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import org.moparscape.elysium.world.Point;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.text.SimpleDateFormat;
import java.util.Random;
/**
* Created by IntelliJ IDEA.
*
* @author RSCD developers
*/
public final class DataConversions {

    /**
     * Character table for the 4-bit chat codec used by
     * {@link #stringToByteArray(String)} and {@link #byteToString(byte[], int, int)}.
     * The 13 most frequent characters get codes 0-12 (one nibble each); every
     * other character takes two nibbles.
     */
    private static final char characters[] = {' ', 'e', 't', 'a', 'o', 'i', 'h',
            'n', 's', 'r', 'd', 'l', 'u', 'm', 'w', 'c', 'y', 'f', 'g', 'p',
            'b', 'v', 'k', 'x', 'j', 'q', 'z', '0', '1', '2', '3', '4', '5',
            '6', '7', '8', '9', ' ', '!', '?', '.', ',', ':', ';', '(', ')',
            '-', '&', '*', '\\', '\'', '@', '#', '+', '=', '\243', '$', '%',
            '"', '[', ']'};

    // NOTE(review): SimpleDateFormat is not thread-safe and timeFormat() does
    // not synchronize on it; confirm call sites are single-threaded before
    // relying on this from multiple threads.
    private static final SimpleDateFormat formatter = new SimpleDateFormat(
            "HH:mm:ss dd-MM-yy");

    /** RSA private exponent used by {@link #decryptRSA(byte[])}. */
    private static final BigInteger key = new BigInteger(
            "730546719878348732291497161314617369560443701473303681965331739205703475535302276087891130348991033265134162275669215460061940182844329219743687403068279");

    /** MD5 digest instance (currently unused — see {@link #md5(String)}). */
    private static final MessageDigest md;

    /** RSA modulus used by {@link #decryptRSA(byte[])}. */
    private static final BigInteger modulus = new BigInteger(
            "1549611057746979844352781944553705273443228154042066840514290174539588436243191882510185738846985723357723362764835928526260868977814405651690121789896823");

    /** Shared pseudo-random source backing all the random* helpers. */
    private static final Random rand = new Random();

    /**
     * Creates an instance of the message digest used for creating md5 hashes
     */
    static {
        try {
            md = MessageDigest.getInstance("MD5");
        } catch (Exception e) {
            throw new ExceptionInInitializerError();
        }
    }

    /**
     * Converts a dotted-quad IPv4 string (e.g. {@code "1.2.3.4"}) to its
     * 32-bit value, widened to a long.
     *
     * @param ip dotted-quad address; must contain at least four octets
     * @return the address as an unsigned 32-bit value
     * @throws NumberFormatException if an octet is not a valid integer
     */
    public static long IPToLong(String ip) {
        String[] octets = ip.split("\\.");
        long result = 0L;
        for (int x = 0; x < 4; x++) {
            result += Integer.parseInt(octets[x]) * Math.pow(256, 3 - x);
        }
        return result;
    }

    /**
     * Converts a 32-bit IPv4 value (as produced by {@link #IPToLong(String)})
     * back to its dotted-quad string form.
     */
    public static String IPToString(long ip) {
        String result = "0.0.0.0";
        for (int x = 0; x < 4; x++) {
            int octet = (int) (ip / Math.pow(256, 3 - x));
            ip -= octet * Math.pow(256, 3 - x);
            if (x == 0) {
                result = String.valueOf(octet);
            } else {
                result += ("." + octet);
            }
        }
        return result;
    }

    /**
     * Calculates the average of all values in the array using integer
     * (truncating) division. Returns 0 for an empty array instead of
     * throwing an ArithmeticException.
     */
    public static int average(int[] values) {
        if (values.length == 0) {
            return 0; // guard against division by zero on empty input
        }
        int total = 0;
        for (int value : values) {
            total += value;
        }
        return total / values.length;
    }

    /**
     * Decodes a chat-codec byte array (see {@link #stringToByteArray(String)})
     * back into a string, then post-processes it: '@' after position 4 and
     * every '%' become spaces, and the first letter of each sentence is
     * uppercased.
     *
     * @param data   encoded bytes
     * @param offset index of the first byte to decode
     * @param length number of encoded bytes to consume
     * @return the decoded text, or "." if decoding fails (e.g. malformed data)
     */
    public static String byteToString(byte[] data, int offset, int length) {
        char[] buffer = new char[100];
        try {
            int k = 0;
            int l = -1;
            for (int i1 = 0; i1 < length; i1++) {
                int j1 = data[offset++] & 0xff;
                // High nibble first, then low nibble. A nibble < 13 is a
                // complete code; >= 13 is the first half of a two-nibble code.
                int k1 = j1 >> 4 & 0xf;
                if (l == -1) {
                    if (k1 < 13) {
                        buffer[k++] = characters[k1];
                    } else {
                        l = k1;
                    }
                } else {
                    buffer[k++] = characters[((l << 4) + k1) - 195];
                    l = -1;
                }
                k1 = j1 & 0xf;
                if (l == -1) {
                    if (k1 < 13) {
                        buffer[k++] = characters[k1];
                    } else {
                        l = k1;
                    }
                } else {
                    buffer[k++] = characters[((l << 4) + k1) - 195];
                    l = -1;
                }
            }
            boolean flag = true; // true when the next letter starts a sentence
            for (int l1 = 0; l1 < k; l1++) {
                char c = buffer[l1];
                if (l1 > 4 && c == '@') {
                    buffer[l1] = ' ';
                }
                if (c == '%') {
                    buffer[l1] = ' ';
                }
                if (flag && c >= 'a' && c <= 'z') {
                    // Adding 0xFFE0 wraps around to subtracting 32,
                    // i.e. uppercases the letter.
                    buffer[l1] += '\uFFE0';
                    flag = false;
                }
                if (c == '.' || c == '!' || c == ':') {
                    flag = true;
                }
            }
            return new String(buffer, 0, k);
        } catch (Exception e) {
            return ".";
        }
    }

    /**
     * Decrypts an RSA encrypted packet using our private key.
     *
     * @return a buffer wrapping the decrypted bytes, or {@code null} if
     *         decryption fails
     */
    public static ByteBuf decryptRSA(byte[] pData) {
        try {
            BigInteger bigInteger = new BigInteger(pData);
            byte[] decrypted = bigInteger.modPow(key, modulus).toByteArray();
            return Unpooled.wrappedBuffer(decrypted);
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Returns the code used to represent the given character in our byte
     * array encoding methods. Unknown characters map to 0 (space).
     */
    private static int getCharCode(char c) {
        for (int x = 0; x < characters.length; x++) {
            if (c == characters[x]) {
                return x;
            }
        }
        return 0;
    }

    /**
     * Returns the protocol offset of one mob coordinate relative to another,
     * wrapped into the range [0, 32).
     */
    private static byte getMobCoordOffset(int coord1, int coord2) {
        byte offset = (byte) (coord1 - coord2);
        if (offset < 0) {
            offset += 32;
        }
        return offset;
    }

    /** Returns the {x, y} wrapped offsets of {@code p1} relative to {@code p2}. */
    public static byte[] getMobPositionOffsets(Point p1, Point p2) {
        byte[] rv = new byte[2];
        rv[0] = getMobCoordOffset(p1.getX(), p2.getX());
        rv[1] = getMobCoordOffset(p1.getY(), p2.getY());
        return rv;
    }

    /** Returns one object coordinate offset as a signed byte (no wrapping). */
    private static byte getObjectCoordOffset(int coord1, int coord2) {
        return (byte) (coord1 - coord2);
    }

    /** Returns the {x, y} signed offsets of {@code p1} relative to {@code p2}. */
    public static byte[] getObjectPositionOffsets(Point p1, Point p2) {
        byte[] rv = new byte[2];
        rv[0] = getObjectCoordOffset(p1.getX(), p2.getX());
        rv[1] = getObjectCoordOffset(p1.getY(), p2.getY());
        return rv;
    }

    /**
     * Returns the random number generator
     */
    public static Random getRandom() {
        return rand;
    }

    /**
     * Converts a username's base-37 hash back to the username. Letters that
     * start a run are uppercased; digits and spaces are restored verbatim.
     *
     * @return the decoded name, or "invalid_name" for a negative hash
     */
    public static String hashToUsername(long l) {
        if (l < 0L)
            return "invalid_name";
        String s = "";
        while (l != 0L) {
            int i = (int) (l % 37L);
            l /= 37L;
            if (i == 0)
                s = " " + s;
            else if (i < 27) {
                // Uppercase when this letter begins a word (next digit is 0).
                if (l % 37L == 0L)
                    s = (char) ((i + 65) - 1) + s;
                else
                    s = (char) ((i + 97) - 1) + s;
            } else {
                s = (char) ((i + 48) - 27) + s;
            }
        }
        return s;
    }

    /**
     * Checks if the given int is in the array
     */
    public static boolean inArray(int[] haystack, int needle) {
        for (int option : haystack) {
            if (needle == option) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks if the given point is in the array (compared by coordinates,
     * not by reference).
     */
    public static boolean inPointArray(Point[] haystack, Point needle) {
        for (Point option : haystack) {
            if (needle.getX() == option.getX()
                    && needle.getY() == option.getY()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Command-line helper: {@code encode <name>} prints the username hash,
     * {@code decode <hash>} prints the username.
     */
    public static void main(String[] argv) throws Exception {
        if (argv[0].equals("encode"))
            System.out.println(usernameToHash(argv[1]));
        if (argv[0].equals("decode"))
            System.out.println(hashToUsername(Long.parseLong(argv[1])));
    }

    /**
     * returns the max of the 2 values
     */
    public static int max(int i1, int i2) {
        return i1 > i2 ? i1 : i2;
    }

    /**
     * returns the md5 hash of a string
     *
     * @throws UnsupportedOperationException always — not yet implemented
     */
    public static String md5(String s) {
        // md.reset();
        // md.update(s.getBytes());
        // return new HexString(md.digest()).toString();
        throw new UnsupportedOperationException("md5(String) not yet implemented.");
    }

    /**
     * Returns true percent% of the time
     */
    public static boolean percentChance(int percent) {
        return random(1, 100) <= percent;
    }

    /**
     * Returns a uniformly distributed random double in [low, high).
     *
     * <p>Fixed: the previous implementation returned
     * {@code high - rand.nextDouble() * low}, which ignored the lower bound
     * and produced values outside the requested range.</p>
     */
    public static double random(double low, double high) {
        return low + (rand.nextDouble() * (high - low));
    }

    /**
     * Returns a random int in [low, high], both bounds inclusive.
     * Requires {@code low <= high}.
     */
    public static int random(int low, int high) {
        return low + rand.nextInt(high - low + 1);
    }

    /**
     * Returns a random value in [low, max] where probabilities ramp down
     * outside the [dip, peak] band and up inside it.
     *
     * <p>NOTE(review): the {@code low} parameter is never used, and a hit
     * exactly equal to the probability total falls through to return 0 —
     * both quirks are preserved to keep behaviour identical; confirm before
     * changing.</p>
     */
    public static int randomWeighted(int low, int dip, int peak, int max) {
        int total = 0;
        int probability = 100;
        int[] probArray = new int[max + 1];
        for (int x = 0; x < probArray.length; x++) {
            total += probArray[x] = probability;
            if (x < dip || x > peak) {
                probability -= 3;
            } else {
                probability += 3;
            }
        }
        int hit = random(0, total);
        total = 0;
        for (int x = 0; x < probArray.length; x++) {
            if (hit >= total && hit < (total + probArray[x])) {
                return x;
            }
            total += probArray[x];
        }
        return 0;
    }

    /** Rounds {@code value} to {@code decimalPlace} decimals, half-up. */
    public static double round(double value, int decimalPlace) {
        BigDecimal bd = new BigDecimal(value);
        bd = bd.setScale(decimalPlace, BigDecimal.ROUND_HALF_UP);
        return (bd.doubleValue());
    }

    /**
     * Rounds up to an int.
     *
     * <p>NOTE(review): {@code Math.round(val + 0.5)} maps whole numbers to the
     * NEXT integer (roundUp(2.0) == 3), unlike {@code Math.ceil}. Behaviour is
     * preserved here — confirm callers do not depend on it before changing.</p>
     */
    public static int roundUp(double val) {
        return (int) Math.round(val + 0.5D);
    }

    /**
     * Returns a ByteBuffer containing everything available from the given
     * InputStream.
     */
    public static ByteBuffer streamToBuffer(BufferedInputStream in)
            throws IOException {
        byte[] buffer = new byte[in.available()];
        // read() may return fewer bytes than requested, so loop until the
        // buffer is full or the stream ends (a single read could silently
        // truncate the data).
        int offset = 0;
        while (offset < buffer.length) {
            int read = in.read(buffer, offset, buffer.length - offset);
            if (read < 0) {
                break;
            }
            offset += read;
        }
        return ByteBuffer.wrap(buffer);
    }

    /**
     * Encodes a string (lowercased, truncated to 80 characters) into the
     * 4-bit chat codec described at {@link #characters}.
     */
    public static byte[] stringToByteArray(String message) {
        byte[] buffer = new byte[100];
        if (message.length() > 80) {
            message = message.substring(0, 80);
        }
        message = message.toLowerCase();
        int length = 0;
        int j = -1; // pending nibble not yet written to the buffer
        for (int k = 0; k < message.length(); k++) {
            int code = getCharCode(message.charAt(k));
            if (code > 12) {
                code += 195; // escape: spread rare characters over two nibbles
            }
            if (j == -1) {
                if (code < 13)
                    j = code;
                else
                    buffer[length++] = (byte) code;
            } else if (code < 13) {
                buffer[length++] = (byte) ((j << 4) + code);
                j = -1;
            } else {
                buffer[length++] = (byte) ((j << 4) + (code >> 4));
                j = code & 0xf;
            }
        }
        if (j != -1) {
            // Flush the trailing half-byte.
            buffer[length++] = (byte) (j << 4);
        }
        byte[] string = new byte[length];
        System.arraycopy(buffer, 0, string, 0, length);
        return string;
    }

    /** Formats an epoch-millis timestamp as "HH:mm:ss dd-MM-yy". */
    public static String timeFormat(long l) {
        return formatter.format(l);
    }

    /**
     * Returns a human-readable elapsed-time string.
     *
     * @throws UnsupportedOperationException always — not yet implemented
     */
    public static String timeSince(long time) {
        // int seconds = (int) ((GameEngine.getTime() - time) / 1000);
        // int minutes = (int) (seconds / 60);
        // int hours = (int) (minutes / 60);
        // int days = (int) (hours / 24);
        // return days + " days " + (hours % 24) + " hours " + (minutes % 60)
        // + " mins";
        throw new UnsupportedOperationException("timeSince not yet implemented.");
    }

    /**
     * Converts a username to a unique base-37 hash. The name is lowercased,
     * non-alphanumerics become spaces, and it is trimmed and capped at 12
     * characters before hashing.
     */
    public static long usernameToHash(String s) {
        s = s.toLowerCase();
        String s1 = "";
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            if (c >= 'a' && c <= 'z')
                s1 = s1 + c;
            else if (c >= '0' && c <= '9')
                s1 = s1 + c;
            else
                s1 = s1 + ' ';
        }
        s1 = s1.trim();
        if (s1.length() > 12)
            s1 = s1.substring(0, 12);
        long l = 0L;
        for (int j = 0; j < s1.length(); j++) {
            char c1 = s1.charAt(j);
            l *= 37L;
            // space contributes 0; letters map to 1-26; digits to 27-36
            if (c1 >= 'a' && c1 <= 'z')
                l += (1 + c1) - 97;
            else if (c1 >= '0' && c1 <= '9')
                l += (27 + c1) - 48;
        }
        return l;
    }
}
| |
package core.geom;
import core.Model;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;
import java.awt.geom.GeneralPath;
import java.awt.geom.NoninvertibleTransformException;
/**
*
* @author Mithusayel Murmu
*/
public class FreeShape extends Shapes {

    /** The free-hand outline accumulated from successive drag points. */
    private GeneralPath path;
    /** Rotation angle (radians) at the previous rotate() step of a drag. */
    private double prevAngle;
    /** Rotation angle (radians) at the current rotate() step of a drag. */
    private double curAngle;
    // First time rotate
    private boolean fRot;
    /** Scratch transform, reconfigured for each geometric operation on the path. */
    private AffineTransform trans;
    /** Inverse of the last resize scale; undoes it so drags do not compound. */
    private AffineTransform inverse;
    /** Bounds captured at the start of a flip/rotate/resize gesture. */
    private Rectangle prevRect;

    /**
     * Creates a free shape whose outline starts at the given point.
     *
     * @param start the first point of the outline
     */
    public FreeShape(Point start) {
        type = Model.TOOLS.FREESHAPE;
        path = new GeneralPath(GeneralPath.WIND_NON_ZERO);
        path.moveTo(start.x, start.y);
        fRot = true;
        prevAngle = curAngle = 0;
        trans = AffineTransform.getRotateInstance(0);
    }

    /** Internal constructor used by {@link #clone()}; the path is set by the caller. */
    private FreeShape() {
        type = Model.TOOLS.FREESHAPE;
        fRot = true;
        prevAngle = curAngle = 0;
        trans = AffineTransform.getRotateInstance(0);
    }

    /** Clears the cached resize inverse and gesture bounds, ending the gesture. */
    @Override
    public void resetInverse() {
        inverse = null;
        prevRect = null;
    }

    @Override
    public GeneralPath getShape() {
        return path;
    }

    /**
     * Extends the outline with a straight segment to {@code p2};
     * {@code p1} is unused for free shapes.
     */
    @Override
    public void modify(Point p1, Point p2) {
        path.lineTo(p2.x, p2.y);
    }

    /** Mirrors the shape horizontally about the centre of its gesture bounds. */
    @Override
    public void hflip() {
        scaleAboutCenter(-1, 1);
    }

    /** Mirrors the shape vertically about the centre of its gesture bounds. */
    @Override
    public void vflip() {
        scaleAboutCenter(1, -1);
    }

    /** Rotates the shape 90 degrees counter-clockwise about its centre. */
    @Override
    public void rot_left() {
        quadrantRotateAboutCenter(-1);
    }

    /** Rotates the shape 90 degrees clockwise about its centre. */
    @Override
    public void rot_right() {
        quadrantRotateAboutCenter(1);
    }

    /**
     * Applies a scale (mirror) about the centre of the cached gesture bounds.
     * Extracted from the four duplicated flip/rotate bodies.
     */
    private void scaleAboutCenter(double sx, double sy) {
        if (prevRect == null)
            prevRect = getBounds();
        int cx = (int) prevRect.getCenterX();
        int cy = (int) prevRect.getCenterY();
        translate(-cx, -cy);
        trans.setToScale(sx, sy);
        path.transform(trans);
        translate(cx, cy);
    }

    /**
     * Applies a rotation of {@code quadrants * 90} degrees about the centre
     * of the cached gesture bounds.
     */
    private void quadrantRotateAboutCenter(int quadrants) {
        if (prevRect == null)
            prevRect = getBounds();
        int cx = (int) prevRect.getCenterX();
        int cy = (int) prevRect.getCenterY();
        translate(-cx, -cy);
        trans.setToQuadrantRotation(quadrants);
        path.transform(trans);
        translate(cx, cy);
    }

    /** Moves the whole outline by (x, y), leaving {@code trans} as the identity. */
    @Override
    public void translate(int x, int y) {
        trans.setToTranslation(x, y);
        path.transform(trans);
        trans.setToIdentity();
    }

    /**
     * No-op: a free shape can only be resized relative to a reference
     * rectangle — see {@link #resize(int, int, int, int, Rectangle)}.
     *
     * @param dx : change in width
     * @param dy : change in height
     * @param cx : x point which remains constant
     * @param cy : y point which remains constant
     */
    @Override
    public void resize(int dx, int dy, int cx, int cy) {}

    /**
     * Rescales the outline so that the reference rectangle {@code r} reaches
     * dx x dy, keeping (cx, cy) fixed. The previous drag's scale is undone
     * via {@code inverse} first so repeated drag events do not compound.
     */
    @Override
    public void resize(int dx, int dy, int cx, int cy, Rectangle r) {
        if (dx == 0 || dy == 0)
            return;
        double nw = dx / Math.max(r.getWidth(), 1);
        double nh = dy / Math.max(r.getHeight(), 1);
        translate(-cx, -cy);
        if (inverse != null)
            path.transform(inverse);
        trans.setToScale(nw, nh);
        path.transform(trans);
        try {
            inverse = trans.createInverse();
        } catch (NoninvertibleTransformException e) {
            // A degenerate scale cannot be inverted; fall back to the
            // identity so the next resize scales the current geometry as-is.
            inverse = AffineTransform.getTranslateInstance(0, 0);
        }
        translate(cx, cy);
    }

    /**
     * Rotates the shape to follow the drag point (x, y) about (cx, cy),
     * applying only the angular delta since the previous event.
     */
    @Override
    public void rotate(int x, int y, int cx, int cy) {
        curAngle = Math.PI / 2 - Math.atan2(cy - y, x - cx);
        if (fRot) {
            // First event of the gesture: establish the reference angle so
            // the shape does not jump to the cursor's absolute angle.
            prevAngle = curAngle;
            fRot = false;
        }
        trans.setToRotation(curAngle - prevAngle, cx, cy);
        path.transform(trans);
        prevAngle = curAngle;
    }

    @Override
    public Rectangle getBounds() {
        return path.getBounds();
    }

    /** Paints the outline; filled only outside draw mode when fill is enabled. */
    @Override
    public void paint(Graphics2D g2) {
        if (Model.getMode() == Model.DRAW_MODE)
            g2.draw(path);
        else {
            if (fill)
                g2.fill(path);
            else
                g2.draw(path);
        }
    }

    /** Resets rotation tracking so the next rotate() starts a fresh gesture. */
    public void resetAngles() {
        prevAngle = curAngle = 0;
        fRot = true;
    }

    /** Returns a copy sharing no mutable geometry with this shape. */
    @Override
    public FreeShape clone() {
        FreeShape fs = new FreeShape();
        fs.setAntialias(antialias);
        fs.setFill(fill);
        fs.setColor(color);
        fs.setStroke(stroke);
        fs.path = (GeneralPath) path.clone();
        return fs;
    }
}
| |
/**
* Copyright (c) 2013-2020 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.transaction;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.redisson.RedissonMapCache;
import org.redisson.api.RCountDownLatch;
import org.redisson.api.RFuture;
import org.redisson.api.RLock;
import org.redisson.api.RPermitExpirableSemaphore;
import org.redisson.api.RReadWriteLock;
import org.redisson.api.RSemaphore;
import org.redisson.api.mapreduce.RMapReduce;
import org.redisson.client.RedisClient;
import org.redisson.client.codec.Codec;
import org.redisson.client.protocol.decoder.MapScanResult;
import org.redisson.command.CommandAsyncExecutor;
import org.redisson.transaction.operation.TransactionalOperation;
/**
*
* @author Nikita Koksharov
*
* @param <K> key type
* @param <V> value type
*/
public class RedissonTransactionalMapCache<K, V> extends RedissonMapCache<K, V> {

    /** Records and replays the map operations of the surrounding transaction. */
    private final BaseTransactionalMapCache<K, V> transactionalMap;

    /** Flipped once the owning transaction has committed or rolled back. */
    private final AtomicBoolean executed;

    public RedissonTransactionalMapCache(CommandAsyncExecutor commandExecutor, String name,
            List<TransactionalOperation> operations, long timeout, AtomicBoolean executed, String transactionId) {
        super(null, commandExecutor, name, null, null, null);
        this.executed = executed;
        RedissonMapCache<K, V> innerMap = new RedissonMapCache<K, V>(null, commandExecutor, name, null, null, null);
        this.transactionalMap = new BaseTransactionalMapCache<K, V>(commandExecutor, timeout, operations, innerMap, transactionId);
    }

    public RedissonTransactionalMapCache(Codec codec, CommandAsyncExecutor commandExecutor, String name,
            List<TransactionalOperation> operations, long timeout, AtomicBoolean executed, String transactionId) {
        super(codec, null, commandExecutor, name, null, null, null);
        this.executed = executed;
        RedissonMapCache<K, V> innerMap = new RedissonMapCache<K, V>(codec, null, commandExecutor, name, null, null, null);
        this.transactionalMap = new BaseTransactionalMapCache<K, V>(commandExecutor, timeout, operations, innerMap, transactionId);
    }

    @Override
    public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit) {
        throw new UnsupportedOperationException("expire method is not supported in transaction");
    }

    @Override
    public RFuture<Boolean> expireAtAsync(Date timestamp) {
        throw new UnsupportedOperationException("expireAt method is not supported in transaction");
    }

    @Override
    public RFuture<Boolean> expireAtAsync(long timestamp) {
        throw new UnsupportedOperationException("expireAt method is not supported in transaction");
    }

    @Override
    public RFuture<Boolean> clearExpireAsync() {
        throw new UnsupportedOperationException("clearExpire method is not supported in transaction");
    }

    @Override
    public RFuture<Boolean> moveAsync(int database) {
        throw new UnsupportedOperationException("moveAsync method is not supported in transaction");
    }

    @Override
    public RFuture<Void> migrateAsync(String host, int port, int database, long timeout) {
        throw new UnsupportedOperationException("migrateAsync method is not supported in transaction");
    }

    @Override
    public RFuture<Boolean> touchAsync() {
        checkState();
        return transactionalMap.touchAsync(commandExecutor);
    }

    @Override
    public RFuture<Boolean> isExistsAsync() {
        checkState();
        return transactionalMap.isExistsAsync();
    }

    @Override
    public RFuture<Boolean> unlinkAsync() {
        // Fixed: this pass-through was missing the finished-transaction guard
        // that every other supported operation in this class performs.
        checkState();
        return transactionalMap.unlinkAsync(commandExecutor);
    }

    @Override
    public RFuture<Boolean> deleteAsync() {
        checkState();
        return transactionalMap.deleteAsync(commandExecutor);
    }

    @Override
    public RFuture<V> putIfAbsentAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit) {
        checkState();
        return transactionalMap.putIfAbsentAsync(key, value, ttl, ttlUnit, maxIdleTime, maxIdleUnit);
    }

    @Override
    public RFuture<Boolean> fastPutOperationAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit) {
        checkState();
        return transactionalMap.fastPutOperationAsync(key, value, ttl, ttlUnit, maxIdleTime, maxIdleUnit);
    }

    @Override
    public RFuture<V> putOperationAsync(K key, V value, long ttlTimeout, long maxIdleTimeout, long maxIdleDelta, long ttlTimeoutDelta) {
        checkState();
        return transactionalMap.putOperationAsync(key, value, ttlTimeout, maxIdleTimeout, maxIdleDelta, ttlTimeoutDelta);
    }

    @Override
    public RFuture<Boolean> fastPutIfAbsentAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit) {
        checkState();
        return transactionalMap.fastPutIfAbsentAsync(key, value, ttl, ttlUnit, maxIdleTime, maxIdleUnit);
    }

    @Override
    public RFuture<Long> remainTimeToLiveAsync() {
        throw new UnsupportedOperationException("remainTimeToLiveAsync method is not supported in transaction");
    }

    @Override
    public RFuture<Void> setMaxSizeAsync(int maxSize) {
        throw new UnsupportedOperationException("setMaxSize method is not supported in transaction");
    }

    @Override
    public RFuture<Boolean> trySetMaxSizeAsync(int maxSize) {
        throw new UnsupportedOperationException("trySetMaxSize method is not supported in transaction");
    }

    @Override
    public <KOut, VOut> RMapReduce<K, V, KOut, VOut> mapReduce() {
        throw new UnsupportedOperationException("mapReduce method is not supported in transaction");
    }

    @Override
    public MapScanResult<Object, Object> scanIterator(String name, RedisClient client,
            long startPos, String pattern, int count) {
        checkState();
        return transactionalMap.scanIterator(name, client, startPos, pattern, count);
    }

    @Override
    public RFuture<Boolean> containsKeyAsync(Object key) {
        checkState();
        return transactionalMap.containsKeyAsync(key);
    }

    @Override
    public RFuture<Boolean> containsValueAsync(Object value) {
        checkState();
        return transactionalMap.containsValueAsync(value);
    }

    @Override
    protected RFuture<V> addAndGetOperationAsync(K key, Number value) {
        checkState();
        return transactionalMap.addAndGetOperationAsync(key, value);
    }

    @Override
    protected RFuture<V> putIfAbsentOperationAsync(K key, V value) {
        checkState();
        return transactionalMap.putIfAbsentOperationAsync(key, value);
    }

    @Override
    protected RFuture<V> putOperationAsync(K key, V value) {
        checkState();
        return transactionalMap.putOperationAsync(key, value);
    }

    @Override
    protected RFuture<Boolean> fastPutIfAbsentOperationAsync(K key, V value) {
        checkState();
        return transactionalMap.fastPutIfAbsentOperationAsync(key, value);
    }

    @Override
    protected RFuture<Boolean> fastPutOperationAsync(K key, V value) {
        checkState();
        return transactionalMap.fastPutOperationAsync(key, value);
    }

    @Override
    @SuppressWarnings("unchecked")
    protected RFuture<Long> fastRemoveOperationAsync(K... keys) {
        checkState();
        return transactionalMap.fastRemoveOperationAsync(keys);
    }

    @Override
    public RFuture<Integer> valueSizeAsync(K key) {
        checkState();
        return transactionalMap.valueSizeAsync(key);
    }

    @Override
    public RFuture<V> getOperationAsync(K key) {
        checkState();
        return transactionalMap.getOperationAsync(key);
    }

    @Override
    public RFuture<Set<K>> readAllKeySetAsync() {
        checkState();
        return transactionalMap.readAllKeySetAsync();
    }

    @Override
    public RFuture<Set<Entry<K, V>>> readAllEntrySetAsync() {
        checkState();
        return transactionalMap.readAllEntrySetAsync();
    }

    @Override
    public RFuture<Collection<V>> readAllValuesAsync() {
        checkState();
        return transactionalMap.readAllValuesAsync();
    }

    @Override
    public RFuture<Map<K, V>> readAllMapAsync() {
        checkState();
        return transactionalMap.readAllMapAsync();
    }

    @Override
    public RFuture<Map<K, V>> getAllOperationAsync(Set<K> keys) {
        checkState();
        return transactionalMap.getAllOperationAsync(keys);
    }

    @Override
    protected RFuture<V> removeOperationAsync(K key) {
        checkState();
        return transactionalMap.removeOperationAsync(key);
    }

    @Override
    protected RFuture<Boolean> removeOperationAsync(Object key, Object value) {
        checkState();
        return transactionalMap.removeOperationAsync(key, value);
    }

    @Override
    protected RFuture<Void> putAllOperationAsync(Map<? extends K, ? extends V> entries) {
        checkState();
        return transactionalMap.putAllOperationAsync(entries);
    }

    @Override
    protected RFuture<Boolean> replaceOperationAsync(K key, V oldValue, V newValue) {
        checkState();
        return transactionalMap.replaceOperationAsync(key, oldValue, newValue);
    }

    @Override
    protected RFuture<V> replaceOperationAsync(K key, V value) {
        checkState();
        return transactionalMap.replaceOperationAsync(key, value);
    }

    /**
     * Fails fast when the owning transaction has already committed or rolled
     * back; every supported operation must call this first.
     *
     * @throws IllegalStateException if the transaction is finished
     */
    protected void checkState() {
        if (executed.get()) {
            throw new IllegalStateException("Unable to execute operation. Transaction is in finished state!");
        }
    }

    @Override
    public RFuture<Void> loadAllAsync(boolean replaceExistingValues, int parallelism) {
        throw new UnsupportedOperationException("loadAll method is not supported in transaction");
    }

    @Override
    public RFuture<Void> loadAllAsync(Set<? extends K> keys, boolean replaceExistingValues, int parallelism) {
        throw new UnsupportedOperationException("loadAll method is not supported in transaction");
    }

    @Override
    public RLock getFairLock(K key) {
        throw new UnsupportedOperationException("getFairLock method is not supported in transaction");
    }

    @Override
    public RCountDownLatch getCountDownLatch(K key) {
        throw new UnsupportedOperationException("getCountDownLatch method is not supported in transaction");
    }

    @Override
    public RPermitExpirableSemaphore getPermitExpirableSemaphore(K key) {
        throw new UnsupportedOperationException("getPermitExpirableSemaphore method is not supported in transaction");
    }

    @Override
    public RSemaphore getSemaphore(K key) {
        throw new UnsupportedOperationException("getSemaphore method is not supported in transaction");
    }

    @Override
    public RLock getLock(K key) {
        throw new UnsupportedOperationException("getLock method is not supported in transaction");
    }

    @Override
    public RReadWriteLock getReadWriteLock(K key) {
        throw new UnsupportedOperationException("getReadWriteLock method is not supported in transaction");
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.serveraction.upgrades;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentMap;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.Role;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.actionmanager.HostRoleCommand;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.actionmanager.ServiceComponentHostEventWrapper;
import org.apache.ambari.server.agent.CommandReport;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.metadata.ActionMetadata;
import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
import org.apache.ambari.server.orm.dao.UpgradeDAO;
import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
import org.apache.ambari.server.orm.entities.UpgradeGroupEntity;
import org.apache.ambari.server.orm.entities.UpgradeItemEntity;
import org.apache.ambari.server.serveraction.AbstractServerAction;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ServiceComponentHostEvent;
import org.apache.ambari.server.state.StackId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.inject.Inject;
/**
* The {@link AutoSkipFailedSummaryAction} is used to check if any
* {@link HostRoleCommand}s were skipped automatically after they failed during
* an upgrade. This will be automatically marked as
* {@link HostRoleStatus#COMPLETED} if there are no skipped failures. Otherwise
* it will be placed into {@link HostRoleStatus#HOLDING}.
*/
public class AutoSkipFailedSummaryAction extends AbstractServerAction {
/**
* Logger.
*/
private static final Logger LOG = LoggerFactory.getLogger(AutoSkipFailedSummaryAction.class);
/**
* The standard output template message.
*/
private static final String FAILURE_STD_OUT_TEMPLATE = "There were {0} skipped failure(s) that must be addressed before you can proceed. Please resolve each failure before continuing with the upgrade.";
private static final String SKIPPED_SERVICE_CHECK = "service_check";
private static final String SKIPPED_HOST_COMPONENT = "host_component";
private static final String SKIPPED = "skipped";
private static final String FAILURES = "failures";
/**
* Used to lookup the {@link UpgradeGroupEntity}.
*/
@Inject
private UpgradeDAO m_upgradeDAO;
/**
* Used to lookup the tasks that need to be checked for
* {@link HostRoleStatus#SKIPPED_FAILED}.
*/
@Inject
private HostRoleCommandDAO m_hostRoleCommandDAO;
/**
* Used for writing structured out.
*/
@Inject
private Gson m_gson;
/**
* Used to look up service check name -> service name bindings
*/
@Inject
private ActionMetadata actionMetadata;
@Inject
private AmbariMetaInfo ambariMetaInfo;
@Inject
private Clusters clusters;
/**
* A mapping of host -> Map<key,info> for each failure.
*/
private Map<String, Object> m_structuredFailures = new HashMap<>();
/**
 * {@inheritDoc}
 * <p>
 * Scans every stage of the current upgrade group for tasks that finished as
 * {@link HostRoleStatus#SKIPPED_FAILED} and reports them twice: a
 * human-readable summary (standard error) and a structured JSON document
 * (structured output). Returns {@link HostRoleStatus#COMPLETED} when nothing
 * was skipped, otherwise {@link HostRoleStatus#HOLDING} so an operator can
 * inspect the failures.
 */
@Override
public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext)
    throws AmbariException, InterruptedException {
  HostRoleCommand hostRoleCommand = getHostRoleCommand();
  long requestId = hostRoleCommand.getRequestId();
  long stageId = hostRoleCommand.getStageId();

  String clusterName = hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getClusterName();
  Cluster cluster = clusters.getCluster(clusterName);
  StackId stackId = cluster.getDesiredStackVersion();

  // use the host role command to get to the parent upgrade group
  UpgradeItemEntity upgradeItem = m_upgradeDAO.findUpgradeItemByRequestAndStage(requestId, stageId);
  UpgradeGroupEntity upgradeGroup = upgradeItem.getGroupEntity();

  // find all of the stages in this group
  long upgradeGroupId = upgradeGroup.getId();
  UpgradeGroupEntity upgradeGroupEntity = m_upgradeDAO.findUpgradeGroup(upgradeGroupId);
  List<UpgradeItemEntity> groupUpgradeItems = upgradeGroupEntity.getItems();
  TreeSet<Long> stageIds = new TreeSet<>();
  for (UpgradeItemEntity groupUpgradeItem : groupUpgradeItems) {
    stageIds.add(groupUpgradeItem.getStageId());
  }

  // for every stage, find all tasks that have been SKIPPED_FAILED - we use a
  // bit of trickery here since within any given request, the stage IDs are
  // always sequential. This allows us to make a simple BETWEEN query instead
  // of some overly complex IN or NESTED SELECT query
  long minStageId = stageIds.first();
  long maxStageId = stageIds.last();

  List<HostRoleCommandEntity> skippedTasks = m_hostRoleCommandDAO.findByStatusBetweenStages(
      hostRoleCommand.getRequestId(),
      HostRoleStatus.SKIPPED_FAILED, minStageId, maxStageId);

  if (skippedTasks.isEmpty()) {
    return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
        "There were no skipped failures", null);
  }

  StringBuilder buffer = new StringBuilder("The following steps failed but were automatically skipped:\n");

  Set<String> skippedCategories = new HashSet<>();
  Map<String, Object> skippedFailures = new HashMap<>();
  Set<String> skippedServiceChecks = new HashSet<>();
  Map<String, Object> hostComponents = new HashMap<>();

  // Internal representation of the failed host components already reported,
  // used to avoid publishing duplicate (host, role) entries.
  // Format: <hostname, Set<Role>>
  Map<String, Set<Role>> publishedHostComponents = new HashMap<>();

  for (HostRoleCommandEntity skippedTask : skippedTasks) {
    try {
      String skippedCategory;
      if (skippedTask.getRoleCommand().equals(RoleCommand.SERVICE_CHECK)) {
        skippedCategory = SKIPPED_SERVICE_CHECK;

        String serviceCheckActionName = skippedTask.getRole().toString();
        String service = actionMetadata.getServiceNameByServiceCheckAction(serviceCheckActionName);
        skippedServiceChecks.add(service);

        skippedFailures.put(SKIPPED_SERVICE_CHECK, skippedServiceChecks);
        m_structuredFailures.put(FAILURES, skippedFailures);
      } else {
        skippedCategory = SKIPPED_HOST_COMPONENT;

        String hostName = skippedTask.getHostName();
        if (null != hostName) {
          List<Object> failures = (List<Object>) hostComponents.get(hostName);
          if (null == failures) {
            failures = new ArrayList<>();
            hostComponents.put(hostName, failures);
            publishedHostComponents.put(hostName, new HashSet<Role>());
          }

          Set<Role> publishedHostComponentsOnHost = publishedHostComponents.get(hostName);
          Role role = skippedTask.getRole();
          if (!publishedHostComponentsOnHost.contains(role)) {
            HashMap<String, String> details = new HashMap<>();
            String service = ambariMetaInfo.getComponentToService(
                stackId.getStackName(), stackId.getStackVersion(), role.toString());
            details.put("service", service);
            details.put("component", role.toString());
            failures.add(details);

            // BUGFIX: record the role as published. Without this line the
            // contains() check above could never fire and every skipped task
            // produced a duplicate (host, component) entry.
            publishedHostComponentsOnHost.add(role);
          }
        }
        skippedFailures.put(SKIPPED_HOST_COMPONENT, hostComponents);
        m_structuredFailures.put(FAILURES, skippedFailures);
      }
      skippedCategories.add(skippedCategory);

      // build the human-readable line for this skipped task
      ServiceComponentHostEventWrapper eventWrapper = new ServiceComponentHostEventWrapper(
          skippedTask.getEvent());
      ServiceComponentHostEvent event = eventWrapper.getEvent();
      buffer.append(event.getServiceComponentName());
      if (null != event.getHostName()) {
        buffer.append(" on ");
        buffer.append(event.getHostName());
      }
      buffer.append(": ");
      buffer.append(skippedTask.getCommandDetail());
      buffer.append("\n");
    } catch (Exception exception) {
      // BUGFIX: pass the exception as the final SLF4J argument so the
      // stack trace of the cause is not silently dropped.
      LOG.warn("Unable to extract failure information for {}", skippedTask, exception);
      buffer.append(": ");
      buffer.append(skippedTask);
    }
  }

  m_structuredFailures.put(SKIPPED, skippedCategories);

  String structuredOutput = m_gson.toJson(m_structuredFailures);
  String standardOutput = MessageFormat.format(FAILURE_STD_OUT_TEMPLATE, skippedTasks.size());
  String standardError = buffer.toString();

  return createCommandReport(0, HostRoleStatus.HOLDING, structuredOutput, standardOutput,
      standardError);
}
}
| |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.conn.ssl;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.URL;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;

import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;

import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.localserver.BasicServerTestBase;
import org.apache.http.localserver.LocalTestServer;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for {@link SSLSocketFactory}: hostname verifier invocation,
 * default trust verification failure against a self-signed server, and
 * overriding trust decisions with a custom {@code TrustStrategy}.
 */
public class TestSSLSocketFactory extends BasicServerTestBase {

    /**
     * Creates a {@link KeyManagerFactory} for the platform default algorithm,
     * falling back to the Sun provider's "SunX509" if unavailable.
     */
    private KeyManagerFactory createKeyManagerFactory() throws NoSuchAlgorithmException {
        String algo = KeyManagerFactory.getDefaultAlgorithm();
        try {
            return KeyManagerFactory.getInstance(algo);
        } catch (NoSuchAlgorithmException ex) {
            return KeyManagerFactory.getInstance("SunX509");
        }
    }

    /**
     * Creates a {@link TrustManagerFactory} for the platform default algorithm,
     * falling back to the Sun provider's "SunX509" if unavailable.
     */
    private TrustManagerFactory createTrustManagerFactory() throws NoSuchAlgorithmException {
        String algo = TrustManagerFactory.getDefaultAlgorithm();
        try {
            return TrustManagerFactory.getInstance(algo);
        } catch (NoSuchAlgorithmException ex) {
            return TrustManagerFactory.getInstance("SunX509");
        }
    }

    /** SSL context presenting the test keystore's certificate/key. */
    private SSLContext serverSSLContext;
    /** SSL context trusting the test keystore's certificate. */
    private SSLContext clientSSLContext;

    /**
     * Loads the "test.keystore" classpath resource, builds server and client
     * SSL contexts from it and starts the local HTTPS test server.
     */
    @Before
    public void setUp() throws Exception {
        ClassLoader cl = getClass().getClassLoader();
        URL url = cl.getResource("test.keystore");
        // Fail with a clear message instead of an NPE if the resource is missing.
        Assert.assertNotNull("test keystore not found on classpath", url);
        KeyStore keystore = KeyStore.getInstance("jks");
        char[] pwd = "nopassword".toCharArray();
        // BUGFIX: the stream returned by openStream() was never closed,
        // leaking a file handle on every test run.
        InputStream instream = url.openStream();
        try {
            keystore.load(instream, pwd);
        } finally {
            instream.close();
        }
        TrustManagerFactory tmf = createTrustManagerFactory();
        tmf.init(keystore);
        TrustManager[] tm = tmf.getTrustManagers();
        KeyManagerFactory kmfactory = createKeyManagerFactory();
        kmfactory.init(keystore, pwd);
        KeyManager[] km = kmfactory.getKeyManagers();
        this.serverSSLContext = SSLContext.getInstance("TLS");
        this.serverSSLContext.init(km, tm, null);
        this.clientSSLContext = SSLContext.getInstance("TLS");
        this.clientSSLContext.init(null, tm, null);
        this.localServer = new LocalTestServer(this.serverSSLContext);
        this.localServer.registerDefaultHandlers();
        this.localServer.start();
        this.httpclient = new DefaultHttpClient();
    }

    /** Returns the local test server's address as an "https" HttpHost. */
    @Override
    protected HttpHost getServerHttp() {
        InetSocketAddress address = this.localServer.getServiceAddress();
        return new HttpHost(
                address.getHostName(),
                address.getPort(),
                "https");
    }

    /**
     * Permissive hostname verifier that records whether the per-socket
     * verify(String, SSLSocket) callback was invoked.
     */
    static class TestX509HostnameVerifier implements X509HostnameVerifier {

        private boolean fired = false;

        public boolean verify(String host, SSLSession session) {
            return true;
        }

        public void verify(String host, SSLSocket ssl) throws IOException {
            this.fired = true;
        }

        public void verify(String host, String[] cns, String[] subjectAlts) throws SSLException {
        }

        public void verify(String host, X509Certificate cert) throws SSLException {
        }

        public boolean isFired() {
            return this.fired;
        }
    }

    /**
     * A request over a trusted connection succeeds and the custom hostname
     * verifier is consulted.
     */
    @Test
    public void testBasicSSL() throws Exception {
        TestX509HostnameVerifier hostVerifier = new TestX509HostnameVerifier();
        SSLSocketFactory socketFactory = new SSLSocketFactory(this.clientSSLContext, hostVerifier);
        Scheme https = new Scheme("https", 443, socketFactory);
        this.httpclient.getConnectionManager().getSchemeRegistry().register(https);
        HttpHost target = getServerHttp();
        HttpGet httpget = new HttpGet("/random/100");
        HttpResponse response = this.httpclient.execute(target, httpget);
        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
        Assert.assertTrue(hostVerifier.isFired());
    }

    /**
     * The JVM default trust store does not trust the test server's self-signed
     * certificate, so the handshake must fail.
     */
    @Test(expected = SSLPeerUnverifiedException.class)
    public void testSSLTrustVerification() throws Exception {
        // Use default SSL context
        SSLContext defaultsslcontext = SSLContext.getInstance("TLS");
        defaultsslcontext.init(null, null, null);
        SSLSocketFactory socketFactory = new SSLSocketFactory(defaultsslcontext,
                SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
        Scheme https = new Scheme("https", 443, socketFactory);
        this.httpclient.getConnectionManager().getSchemeRegistry().register(https);
        HttpHost target = getServerHttp();
        HttpGet httpget = new HttpGet("/random/100");
        this.httpclient.execute(target, httpget);
    }

    /**
     * A custom {@code TrustStrategy} can accept the otherwise-untrusted
     * self-signed certificate, letting the request succeed.
     */
    @Test
    public void testSSLTrustVerificationOverride() throws Exception {
        // Use default SSL context
        SSLContext defaultsslcontext = SSLContext.getInstance("TLS");
        defaultsslcontext.init(null, null, null);
        SSLSocketFactory socketFactory = new SSLSocketFactory(new TrustStrategy() {
            public boolean isTrusted(
                    final X509Certificate[] chain, final String authType) throws CertificateException {
                // trust exactly the single self-signed certificate
                return chain.length == 1;
            }
        }, SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
        Scheme https = new Scheme("https", 443, socketFactory);
        this.httpclient.getConnectionManager().getSchemeRegistry().register(https);
        HttpHost target = getServerHttp();
        HttpGet httpget = new HttpGet("/random/100");
        HttpResponse response = this.httpclient.execute(target, httpget);
        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
    }
}
| |
package jadx.api;
import jadx.core.codegen.CodeWriter;
import jadx.core.dex.attributes.AFlag;
import jadx.core.dex.attributes.nodes.LineAttrNode;
import jadx.core.dex.info.AccessInfo;
import jadx.core.dex.nodes.ClassNode;
import jadx.core.dex.nodes.FieldNode;
import jadx.core.dex.nodes.MethodNode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
/**
 * Public wrapper around a decompiled {@link ClassNode}. Top-level instances
 * hold a reference to the {@link JadxDecompiler}; inner-class instances hold
 * a reference to their parent instead (decompiler field is null for them).
 */
public final class JavaClass implements JavaNode {
    private final JadxDecompiler decompiler;
    private final ClassNode cls;
    private final JavaClass parent;

    private List<JavaClass> innerClasses = Collections.emptyList();
    private List<JavaField> fields = Collections.emptyList();
    private List<JavaMethod> methods = Collections.emptyList();

    JavaClass(ClassNode classNode, JadxDecompiler decompiler) {
        this.decompiler = decompiler;
        this.cls = classNode;
        this.parent = null;
    }

    /**
     * Inner classes constructor
     */
    JavaClass(ClassNode classNode, JavaClass parent) {
        this.decompiler = null;
        this.cls = classNode;
        this.parent = parent;
    }

    /**
     * Returns the decompiled code, triggering decompilation if needed.
     * Returns an empty string when no code is available.
     */
    public String getCode() {
        CodeWriter code = cls.getCode();
        if (code == null) {
            decompile();
            code = cls.getCode();
        }
        if (code == null) {
            return "";
        }
        return code.toString();
    }

    /**
     * Decompiles this class if not already done. No-op for inner classes
     * (they are decompiled as part of their top-level parent).
     */
    public void decompile() {
        if (decompiler == null) {
            return;
        }
        if (cls.getCode() == null) {
            decompiler.processClass(cls);
            load();
        }
    }

    /**
     * Walks up the parent chain to find the decompiler owning this class.
     * BUGFIX: inner classes have a null {@code decompiler} field; methods
     * that need the decompiler must use this instead of the raw field.
     *
     * @return the owning decompiler, or null if none is reachable
     */
    private JadxDecompiler getRootDecompiler() {
        if (decompiler != null) {
            return decompiler;
        }
        return parent == null ? null : parent.getRootDecompiler();
    }

    ClassNode getClassNode() {
        return cls;
    }

    /**
     * Populates the inner-class, field and method wrapper lists from the
     * underlying ClassNode, skipping DONT_GENERATE members. Methods are
     * sorted by name; all lists are unmodifiable.
     */
    private void load() {
        int inClsCount = cls.getInnerClasses().size();
        if (inClsCount != 0) {
            List<JavaClass> list = new ArrayList<JavaClass>(inClsCount);
            for (ClassNode inner : cls.getInnerClasses()) {
                if (!inner.contains(AFlag.DONT_GENERATE)) {
                    JavaClass javaClass = new JavaClass(inner, this);
                    javaClass.load();
                    list.add(javaClass);
                }
            }
            this.innerClasses = Collections.unmodifiableList(list);
        }
        int fieldsCount = cls.getFields().size();
        if (fieldsCount != 0) {
            List<JavaField> flds = new ArrayList<JavaField>(fieldsCount);
            for (FieldNode f : cls.getFields()) {
                if (!f.contains(AFlag.DONT_GENERATE)) {
                    flds.add(new JavaField(f, this));
                }
            }
            this.fields = Collections.unmodifiableList(flds);
        }
        int methodsCount = cls.getMethods().size();
        if (methodsCount != 0) {
            List<JavaMethod> mths = new ArrayList<JavaMethod>(methodsCount);
            for (MethodNode m : cls.getMethods()) {
                if (!m.contains(AFlag.DONT_GENERATE)) {
                    mths.add(new JavaMethod(this, m));
                }
            }
            Collections.sort(mths, new Comparator<JavaMethod>() {
                @Override
                public int compare(JavaMethod o1, JavaMethod o2) {
                    return o1.getName().compareTo(o2.getName());
                }
            });
            this.methods = Collections.unmodifiableList(mths);
        }
    }

    /**
     * Returns the code position annotations for this class.
     * BUGFIX: returns an empty map instead of throwing an NPE when no code
     * is available (e.g. inner class whose parent was never decompiled).
     */
    private Map<CodePosition, Object> getCodeAnnotations() {
        decompile();
        CodeWriter code = cls.getCode();
        if (code == null) {
            return Collections.emptyMap();
        }
        return code.getAnnotations();
    }

    /**
     * Resolves the definition position of the node referenced at the given
     * decompiled (line, offset), or null if nothing resolvable is there.
     */
    public CodePosition getDefinitionPosition(int line, int offset) {
        Map<CodePosition, Object> map = getCodeAnnotations();
        if (map.isEmpty()) {
            return null;
        }
        Object obj = map.get(new CodePosition(line, offset));
        if (!(obj instanceof LineAttrNode)) {
            return null;
        }
        ClassNode clsNode = null;
        if (obj instanceof ClassNode) {
            clsNode = (ClassNode) obj;
        } else if (obj instanceof MethodNode) {
            clsNode = ((MethodNode) obj).getParentClass();
        } else if (obj instanceof FieldNode) {
            clsNode = ((FieldNode) obj).getParentClass();
        }
        if (clsNode == null) {
            return null;
        }
        clsNode = clsNode.getTopParentClass();
        // BUGFIX: use the root decompiler; the 'decompiler' field is null
        // for inner classes and dereferencing it directly caused an NPE.
        JadxDecompiler rootDecompiler = getRootDecompiler();
        if (rootDecompiler == null) {
            return null;
        }
        JavaClass jCls = rootDecompiler.findJavaClass(clsNode);
        if (jCls == null) {
            return null;
        }
        jCls.decompile();
        int defLine = ((LineAttrNode) obj).getDecompiledLine();
        if (defLine == 0) {
            return null;
        }
        return new CodePosition(jCls, defLine, 0);
    }

    /**
     * Maps a decompiled line number back to the source line, or null when
     * unknown. BUGFIX: null-safe when no code is available.
     */
    public Integer getSourceLine(int decompiledLine) {
        decompile();
        CodeWriter code = cls.getCode();
        if (code == null) {
            return null;
        }
        return code.getLineMapping().get(decompiledLine);
    }

    @Override
    public String getName() {
        return cls.getShortName();
    }

    @Override
    public String getFullName() {
        return cls.getFullName();
    }

    public String getPackage() {
        return cls.getPackage();
    }

    @Override
    public JavaClass getDeclaringClass() {
        return parent;
    }

    public AccessInfo getAccessInfo() {
        return cls.getAccessFlags();
    }

    public List<JavaClass> getInnerClasses() {
        decompile();
        return innerClasses;
    }

    public List<JavaField> getFields() {
        decompile();
        return fields;
    }

    public List<JavaMethod> getMethods() {
        decompile();
        return methods;
    }

    public int getDecompiledLine() {
        return cls.getDecompiledLine();
    }

    @Override
    public boolean equals(Object o) {
        return this == o || o instanceof JavaClass && cls.equals(((JavaClass) o).cls);
    }

    @Override
    public int hashCode() {
        return cls.hashCode();
    }

    @Override
    public String toString() {
        // BUGFIX: the original printed the same full name twice
        // ("X[ X ]") because both operands resolved to cls.getFullName().
        return getFullName();
    }
}
| |
/*
Derby - Class ClobTest
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Changes for GemFireXD distributed data platform (some marked by "GemStone changes")
*
* Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.derbyTesting.functionTests.tests.jdbc4;
import junit.framework.*;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.TestConfiguration;
import java.sql.*;
import java.io.*;
import java.lang.reflect.*;
import java.util.*;
/* This class is used to store the details of the methods that
 * throw a SQLFeatureNotSupportedException in the implementation
 * of java.sql.Clob.
 *
 * It stores the following information about the methods:
 *
 * a) Name
 * b) Method Parameters
 * c) Whether the method is exempted in the Embedded Server
 * d) Whether the method is exempted in the NetworkClient
 *
 */
import org.apache.derbyTesting.functionTests.util.streams.LoopingAlphabetReader;
import org.apache.derbyTesting.junit.DatabasePropertyTestSetup;
/**
 * Immutable holder describing a {@code java.sql.Clob} method that is exempted
 * from throwing a SQLException after {@code free()} has been called, and the
 * framework(s) (network client / embedded) in which the exemption applies.
 */
class ExemptClobMD {
    // The name of the method (all fields are final: instances are immutable).
    private final String methodName_;
    // The parameter types of the method (null for a no-arg method).
    private final Class [] params_;
    // Whether it is exempted in the Client or the Embedded framework.
    private final boolean isClientFramework_;
    private final boolean isEmbeddedFramework_;

    /**
     * Initializes the object with the details of a method that has been
     * exempted.
     *
     * @param methodName name of the exempted method
     * @param params parameter types of the method (may be null)
     * @param isClientFramework true if the method is exempted in the
     * Client framework
     * @param isEmbeddedFramework true if the method is exempted in the
     * Embedded framework
     */
    public ExemptClobMD(String methodName, Class [] params,
                        boolean isClientFramework,
                        boolean isEmbeddedFramework) {
        methodName_ = methodName;
        params_ = params;
        isClientFramework_ = isClientFramework;
        isEmbeddedFramework_ = isEmbeddedFramework;
    }

    /**
     * @return the name of the method
     */
    public String getMethodName() { return methodName_; }

    /**
     * @return the parameter types of the method, or null for a no-arg method
     */
    public Class [] getParams() { return params_; }

    /**
     * @return true if the method is exempted in the Client framework
     */
    public boolean getIfClientFramework() { return isClientFramework_; }

    /**
     * @return true if the method is exempted in the Embedded framework
     */
    public boolean getIfEmbeddedFramework() { return isEmbeddedFramework_; }
}
/*
* Tests of the JDBC 4.0 specific <code>Clob</code> methods.
*/
public class ClobTest
extends BaseJDBCTestCase {
/** Default Clob object used by the tests; released in tearDown(). */
private Clob clob = null;
// Details of the methods that are exempted from throwing a SQLException
// when they are called after free() has been invoked on a LOB.
// Entry fields: method name, parameter types,
// exempted-on-client, exempted-on-embedded.
private static final ExemptClobMD [] emd = new ExemptClobMD [] {
new ExemptClobMD( "getCharacterStream", new Class[] { long.class, long.class } ,true,true),
new ExemptClobMD( "setString", new Class[] { long.class, String.class } ,false,true),
new ExemptClobMD( "truncate", new Class[] { long.class },false,true),
new ExemptClobMD( "free", null,true,true)
};
// A HashMap indexed by Method to allow an easy lookup of whether a given
// method of the Clob interface has been exempted. Populated lazily by
// buildHashSet().
private HashMap<Method,ExemptClobMD> excludedMethodSet =
new HashMap<Method,ExemptClobMD>();
/**
 * Create the test with the given name.
 *
 * @param name name of the test.
 */
public ClobTest(String name) {
super(name);
}
// JUnit 3 lifecycle: runs before every test method.
// NOTE(review): does not call super.setUp() -- presumably intentional,
// but verify against BaseJDBCTestCase's contract.
public void setUp()
throws SQLException {
// Life span of Clob objects are limited by the transaction. Need
// autocommit off so Clob objects survive closing of result set.
getConnection().setAutoCommit(false);
getConnection().setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
}
// Releases the Clob created by a test (free() also releases any row lock
// it holds) and clears the lookup map before delegating to the superclass.
protected void tearDown() throws Exception {
if (clob != null) {
clob.free();
clob = null;
}
excludedMethodSet = null;
super.tearDown();
}
/**
 * Populates {@code excludedMethodSet} with the {@link Clob} methods that are
 * exempted from the free()-invalidation check, keyed by their reflective
 * {@link Method} object.
 */
void buildHashSet() {
    for (ExemptClobMD exempt : emd) {
        try {
            Method m = Clob.class.getMethod(exempt.getMethodName(), exempt.getParams());
            excludedMethodSet.put(m, exempt);
        } catch (NoSuchMethodException nsme) {
            fail("The method could not be found in the interface");
        }
    }
}
/**
 * Tests the implementation for the free() method in the
 * Clob interface.
 *
 * @throws SQLException if an error occurs during releasing
 * the Clob resources
 *
 */
public void testFreeandMethodsAfterCallingFree()
throws IllegalAccessException, InvocationTargetException, SQLException
{
clob = BlobClobTestSetup.getSampleClob(getConnection());
//call the buildHashSet method to initialize the
//map with the method signatures that are exempted
//from throwing a SQLException after free has been called
//on the Clob object.
buildHashSet();
// obtain both streams before free() so free() must invalidate them too
InputStream asciiStream = clob.getAsciiStream();
Reader charStream = clob.getCharacterStream();
clob.free();
//testing the idempotence of the free() method:
//it can be called multiple times; calls after
//the first are treated as no-ops
clob.free();
//the Clob object is rendered invalid by the call
//to the free method, so invoking any non-exempted
//method on it should throw a SQLException
buildMethodList(clob);
}
/**
 * Enumerates the public methods of the {@link Clob} interface, invokes every
 * non-exempted one on the given (freed) LOB and fails the test with a single
 * message listing all methods that did not throw the expected SQLException.
 *
 * @param LOB an instance of the Clob interface implementation
 */
void buildMethodList(Object LOB)
        throws IllegalAccessException, InvocationTargetException {
    // collect the methods that fail the check so they can all be
    // reported in one assertion message
    List<Method> failedMethods = new ArrayList<Method>();
    for (Method method : Clob.class.getMethods()) {
        if (checkIfExempted(method)) {
            continue;
        }
        if (!checkIfMethodThrowsSQLException(LOB, method)) {
            failedMethods.add(method);
        }
    }
    if (!failedMethods.isEmpty()) {
        // use StringBuilder instead of repeated String += (O(n^2))
        StringBuilder failureMessage = new StringBuilder(
                "The Following methods don't throw required exception - ");
        int size = failedMethods.size();
        for (int i = 0; i < size; i++) {
            if (i > 0) {
                // last two names are joined by '&', earlier ones by ','
                failureMessage.append(i == size - 1 ? " & " : " , ");
            }
            failureMessage.append(failedMethods.get(i).getName());
        }
        fail(failureMessage.toString());
    }
}
/**
 * Checks whether the given method is exempted from testing in the current
 * framework.
 *
 * @param m the method to check for exemption
 * @return <code>false</code> if the method shall be tested,
 * <code>true</code> if the method is exempted and shall not be tested.
 */
boolean checkIfExempted(Method m) {
    ExemptClobMD md = excludedMethodSet.get(m);
    if (md == null) {
        // not in the exemption table at all
        return false;
    }
    if (usingDerbyNetClient()) {
        return md.getIfClientFramework();
    }
    if (usingEmbedded()) {
        return md.getIfEmbeddedFramework();
    }
    fail("Unknown test environment/framework");
    return false;
}
/**
 * Invokes the given method on the (freed) LOB and reports whether it threw
 * the SQLException with SQLState XJ215 that is required after free().
 *
 * @param LOB the object that implements the Clob interface
 * @param method the method to invoke with default-valued arguments
 * @return true if the invocation raised a SQLException with state XJ215
 * @throws InvocationTargetException if the method threw something that is
 * not a SQLException
 */
boolean checkIfMethodThrowsSQLException(Object LOB, Method method)
        throws IllegalAccessException, InvocationTargetException {
    try {
        method.invoke(LOB, getNullValues(method.getParameterTypes()));
        // no exception at all -> the required SQLException was not thrown
        return false;
    } catch (InvocationTargetException ite) {
        Throwable cause = ite.getCause();
        if (!(cause instanceof SQLException)) {
            // unexpected failure -> propagate to the caller
            throw ite;
        }
        return ((SQLException) cause).getSQLState().equals("XJ215");
    }
}
/**
 * Builds an argument array holding the default value for each of the given
 * parameter types (null for reference types, boxed zero/false for
 * primitives).
 *
 * @param params the parameter types of the method to be invoked
 * @return an array of default values matching the parameter list
 */
Object[] getNullValues(Class<?> [] params) {
    Object[] defaults = new Object[params.length];
    int idx = 0;
    for (Class<?> paramType : params) {
        defaults[idx++] = getNullValueForType(paramType);
    }
    return defaults;
}
/**
 * Returns the default value for the specified type: null for reference
 * types, and the boxed zero/false value for primitives.
 *
 * @param type the type of the parameter for which the default
 * value is required
 * @return the default value for the specific type
 */
Object getNullValueForType(Class type)
{
    if (!type.isPrimitive()) {
        return null;
    }
    // valueOf() is used instead of the deprecated boxing constructors;
    // it returns cached instances where available.
    if (type == Boolean.TYPE) {
        return Boolean.FALSE;
    }
    if (type == Character.TYPE) {
        return Character.valueOf((char) 0);
    }
    if (type == Byte.TYPE) {
        return Byte.valueOf((byte) 0);
    }
    if (type == Short.TYPE) {
        return Short.valueOf((short) 0);
    }
    if (type == Integer.TYPE) {
        return Integer.valueOf(0);
    }
    if (type == Long.TYPE) {
        return Long.valueOf(0L);
    }
    if (type == Float.TYPE) {
        return Float.valueOf(0f);
    }
    if (type == Double.TYPE) {
        return Double.valueOf(0d);
    }
    fail("Don't know how to handle type " + type);
    return null; // unreachable statement
}
/**
 * Tests the implementation of getCharacterStream(long pos, long length):
 * inserts a known CLOB value, reads back a 5-character sub-stream starting
 * at (1-based) position 2, and compares it with the matching substring.
 *
 * @throws Exception
 */
public void testGetCharacterStreamLong()
throws Exception {
String str1 = "This is a test String. This is a test String";
Reader r1 = new java.io.StringReader(str1);
PreparedStatement ps = prepareStatement(
"insert into BLOBCLOB(ID, CLOBDATA) values(?,?)");
int id = BlobClobTestSetup.getID();
ps.setInt(1,id);
ps.setCharacterStream(2,r1);
ps.execute();
ps.close();
Statement st = createStatement();
ResultSet rs = st.executeQuery("select CLOBDATA from " +
"BLOBCLOB where ID="+id);
rs.next();
Clob clob = rs.getClob(1);
// Clob positions are 1-based: (pos=2, length=5) corresponds to
// str1.substring(1, 6) below.
Reader r_1 = clob.getCharacterStream(2L,5L);
String str2 = str1.substring(1,6);
Reader r_2 = new java.io.StringReader(str2);
assertEquals(r_2,r_1);
rs.close();
st.close();
}
/**
 * Test that <code>Clob.getCharacterStream(long,long)</code> works on CLOBs
 * that are streamed from store. (DERBY-2891)
 */
public void testGetCharacterStreamLongOnLargeClob() throws Exception {
getConnection().setAutoCommit(false);
// create large (>32k) clob that can be read from store
final int size = 33000;
StringBuilder sb = new StringBuilder(size);
for (int i = 0; i < size; i += 10) {
sb.append("1234567890");
}
final int id = BlobClobTestSetup.getID();
PreparedStatement ps = prepareStatement(
"insert into blobclob(id, clobdata) values (?,cast(? as clob))");
ps.setInt(1, id);
ps.setString(2, sb.toString());
ps.executeUpdate();
ps.close();
Statement s = createStatement();
ResultSet rs = s.executeQuery(
"select clobdata from blobclob where id = " + id);
assertTrue(rs.next());
Clob c = rs.getClob(1);
// request a small region of the clob: 1-based pos 4, length 3 -> "456"
BufferedReader r = new BufferedReader(c.getCharacterStream(4L, 3L));
assertEquals("456", r.readLine());
r.close();
c.free();
rs.close();
s.close();
// undo the insert so the shared fixture table is left unchanged
rollback();
}
/**
 * Tests the exceptions thrown by the getCharacterStream
 * (long pos, long length) for the following conditions
 * a) pos <= 0
 * b) pos > (length of LOB)
 * c) length < 0
 * d) pos + length > (length of LOB).
 *
 * @throws SQLException
 */
public void testGetCharacterStreamLongExceptionConditions()
throws SQLException {
String str1 = "This is a test String. This is a test String";
Reader r1 = new java.io.StringReader(str1);
PreparedStatement ps = prepareStatement(
"insert into BLOBCLOB(ID, CLOBDATA) values(?,?)");
int id = BlobClobTestSetup.getID();
ps.setInt(1,id);
ps.setCharacterStream(2,r1);
ps.execute();
ps.close();
Statement st = createStatement();
ResultSet rs = st.executeQuery("select CLOBDATA from " +
"BLOBCLOB where ID="+id);
rs.next();
Clob clob = rs.getClob(1);
// check the case where pos <= 0
try {
// set pos as negative
clob.getCharacterStream(-2L,5L);
//Should not come here. The exception has to be thrown.
fail("FAIL: Expected SQLException for pos being negative " +
"not thrown");
}
catch(SQLException sqle) {
// The SQLState for the exception thrown when pos <= 0 is XJ070
assertSQLState("XJ070", sqle);
}
// check for the case pos > length of clob
try {
// set the pos to any value greater than the Clob length
clob.getCharacterStream(clob.length()+1, 5L);
//Should not come here. The exception has to be thrown.
fail("FAIL: Expected SQLException for position being greater than " +
"length of LOB not thrown");
}
catch(SQLException sqle) {
// The SQLState for the exception thrown when pos > length of Clob
// is XJ087
assertSQLState("XJ087", sqle);
}
//check for the case when length < 0
try {
// set length as negative
clob.getCharacterStream(2L, -5L);
// Should not come here. The exception has to be thrown.
fail("Fail: expected exception for the length being negative " +
"not thrown");
}
catch(SQLException sqle) {
// The SQLState for the exception thrown when length < 0
// is XJ071
assertSQLState("XJ071", sqle);
}
//check for the case when pos + length > length of Clob
try {
// set pos + length > length of Clob
clob.getCharacterStream((clob.length() - 4), 10L);
// Should not come here. The exception has to be thrown.
fail("Fail: expected exception for the sum of position and length" +
" being greater than the LOB size not thrown");
}
catch(SQLException sqle) {
// The SQLState for the exception thrown when pos + length exceeds
// the length of the Clob is XJ087
assertSQLState("XJ087", sqle);
}
}
/**
 * Tests that the InputStream got from
 * a empty Clob reflects new data in the
 * underlying Clob.
 *
 * @throws Exception
 */
public void testGetAsciiStreamCreateClob() throws Exception {
//The String that will be used
//to do the inserts into the
//Clob.
String str = "Hi I am the insert String";
//Create the InputStream that will
//be used for comparing the Stream
//that is obtained from the Clob after
//the update.
ByteArrayInputStream str_is = new ByteArrayInputStream
(str.getBytes("US-ASCII"));
//create the empty Clob.
Clob clob = getConnection().createClob();
//Get the InputStream from this
//Clob *before* any data is written.
InputStream is = clob.getAsciiStream();
//set the String into the clob.
clob.setString(1, str);
//Ensure that the previously obtained Stream
//sees the data written afterwards, i.e. it is
//a live view of the Clob contents.
assertEquals(str_is, is);
}
/**
 * Tests that the Reader got from
 * a empty Clob reflects new data in the
 * underlying Clob.
 *
 * @throws Exception
 */
public void testGetCharacterStreamCreateClob() throws Exception {
//The String that will be used
//to do the inserts into the
//Clob.
String str = "Hi I am the insert String";
//The string reader corresponding to this
//string that will be used in the comparison.
StringReader r_string = new StringReader(str);
//create the empty Clob.
Clob clob = getConnection().createClob();
//Get the Reader from this
//Clob *before* any data is written.
Reader r_clob = clob.getCharacterStream();
//set the String into the clob.
clob.setString(1, str);
//Now compare the reader corresponding
//to the string and the reader obtained
//form the clob to see if they match:
//the Reader must be a live view of the Clob.
assertEquals(r_string, r_clob);
}
/**
 * Tests that the data updated in a Clob
 * is always reflected in the InputStream
 * got. Here the updates into the Clob are
 * done using both an OutputStream obtained
 * from this Clob as well as using Clob.setString.
 *
 * @throws Exception
 */
public void testGetAsciiStreamClobUpdates() throws Exception {
//The String that will be used
//to do the inserts into the
//Clob.
String str1 = "Hi I am the insert string";
//Stores the byte array representation of
//the insert string.
byte[] str1_bytes = str1.getBytes();
//The String that will be used in the
//second series of updates
String str2 = "Hi I am the update string";
//create the empty Clob.
Clob clob = getConnection().createClob();
//Get the InputStream from this
//Clob before any writes happen.
InputStream is_BeforeWrite = clob.getAsciiStream();
//Get an OutputStream from this Clob
//into which the data can be written
OutputStream os = clob.setAsciiStream(1);
os.write(str1_bytes);
//Doing a setString now on the Clob
//(appending str2 right after str1)
//should reflect the same extension
//in the InputStream also.
clob.setString((str1_bytes.length)+1, str2);
//Get the input stream from the
//Clob after the update
InputStream is_AfterWrite = clob.getAsciiStream();
//Now check if the two InputStreams
//match: both must see the full updated content.
assertEquals(is_BeforeWrite, is_AfterWrite);
}
/**
 * Tests that the data updated in a Clob
 * is always reflected in the Reader
 * got. Here the updates are done using
 * both a Writer obtained from this Clob
 * and using Clob.setString.
 *
 * @throws Exception
 */
public void testGetCharacterStreamClobUpdates() throws Exception {
    //The String that will be used
    //to do the inserts into the
    //Clob.
    String str1 = "Hi I am the insert string";
    //The String that will be used in the
    //second series of updates
    String str2 = "Hi I am the update string";
    //create the empty Clob.
    Clob clob = getConnection().createClob();
    //Get the Reader from this
    //Clob
    Reader r_BeforeWrite = clob.getCharacterStream();
    //Get a writer from this Clob
    //into which the data can be written
    Writer w = clob.setCharacterStream(1);
    // FIX: copy the characters of str1 (the insert string) — the
    // original code copied from str2, contradicting both the variable
    // name and the comments (it only worked because the strings happen
    // to have the same length).
    char [] chars_str1 = new char[str1.length()];
    str1.getChars(0, str1.length(), chars_str1, 0);
    w.write(chars_str1);
    //Doing a setString now on the Clob
    //should reflect the same extension
    //in the Reader also.
    // FIX: Clob.setString takes a 1-based CHARACTER position, so use the
    // character length of str1, not its byte length (equal here only
    // because the text is pure ASCII).
    clob.setString(str1.length() + 1, str2);
    //Now get the reader from the Clob after
    //the update has been done.
    Reader r_AfterWrite = clob.getCharacterStream();
    //Now compare the two readers to see that they
    //contain the same data.
    assertEquals(r_BeforeWrite, r_AfterWrite);
}
/**
 * Test that a lock held on the corresponding row is released when free() is
 * called on the Clob object.
 *
 * @throws java.sql.SQLException
 */
public void testLockingAfterFree() throws SQLException
{
    int id = initializeLongClob(); // Opens clob object
// GemStone changes BEGIN
    // we fail-fast instead of timing out, so no timeout is expected here
    executeParallelUpdate(id, false); // Test that timeout occurs
// GemStone changes END
    // Test that update goes through after the clob is closed
    clob.free();
    executeParallelUpdate(id, false);
    commit();
}
/**
 * Test that a lock held on the corresponding row is NOT released when
 * free() is called on the Clob object if the isolation level is
 * Repeatable Read.
 *
 * @throws java.sql.SQLException
 */
public void testLockingAfterFreeWithRR() throws SQLException
{
    getConnection().
        setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
    int id = initializeLongClob(); // Opens clob object
    executeParallelUpdate(id, true); // Test that timeout occurs
    // Under repeatable read the row lock survives freeing the clob,
    // so the parallel update must still time out.
    clob.free();
    executeParallelUpdate(id, true);
    // Test that the update goes through after the transaction has committed
    commit();
    executeParallelUpdate(id, false);
}
/**
 * Test that a lock held on the corresponding row is released when
 * free() is called on the Clob object if the isolation level is
 * Read Uncommitted.
 *
 * @throws java.sql.SQLException
 */
public void testLockingAfterFreeWithDirtyReads() throws SQLException
{
    getConnection().
        setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
    int id = initializeLongClob(); // Opens clob object
// GemStone changes BEGIN
    // we fail-fast instead of timing out, so no timeout is expected here
    executeParallelUpdate(id, false); // Test that timeout occurs
// GemStone changes END
    // Test that update goes through after the clob is closed
    clob.free();
    executeParallelUpdate(id, false);
    commit();
}
/**
 * Inserts a row with a large clob into the test table, reads the row
 * back and assigns the clob value to the <code>clob</code> field.
 *
 * @return the id of the row that was inserted
 * @throws java.sql.SQLException
 */
private int initializeLongClob() throws SQLException
{
    // The value must exceed one page for row locking to occur.
    final int lobLength = 40000;
    // Insert a single row carrying a long Clob value.
    PreparedStatement insert = prepareStatement(
        "insert into BLOBCLOB(ID, CLOBDATA) values(?,?)");
    int id = BlobClobTestSetup.getID();
    insert.setInt(1, id);
    insert.setCharacterStream(
        2, new LoopingAlphabetReader(lobLength), lobLength);
    insert.execute();
    insert.close();
    commit();
    // Read the row back and keep a reference to its Clob in the field.
    Statement query = createStatement();
    ResultSet rs =
        query.executeQuery("select CLOBDATA from BLOBCLOB where ID=" + id);
    rs.next();
    clob = rs.getClob(1);
    rs.close();
    query.close();
    return id;
}
/**
 * Try to update the row with the given id from a second connection.
 * Flag a failure if a lock timeout occurs when not expected, and vice
 * versa.
 *
 * @param id the id of the row to be updated
 * @param timeoutExpected true if it is expected that the update times out
 * @throws java.sql.SQLException
 */
private void executeParallelUpdate(int id, boolean timeoutExpected)
    throws SQLException
{
    // FIX: the original leaked conn2/stmt2 when fail() threw (the close
    // calls were skipped); try/finally guarantees both are released on
    // every path while preserving the commit/rollback semantics.
    Connection conn2 = openDefaultConnection();
    try {
        Statement stmt2 = conn2.createStatement();
        try {
            stmt2.executeUpdate("update BLOBCLOB set BLOBDATA = " +
                    "cast(X'FFFFFF' as blob) where ID=" + id);
            conn2.commit();
            if (timeoutExpected) {
                fail("FAIL - should have gotten lock timeout");
            }
        } catch (SQLException se) {
            conn2.rollback();
            if (timeoutExpected) {
                // The update must have failed because of the row lock.
                assertSQLState(LOCK_TIMEOUT, se);
            } else {
                throw se;
            }
        } finally {
            stmt2.close();
        }
    } finally {
        conn2.close();
    }
}
/**
 * Create test suite for this test, wrapped in the setup that creates
 * the BLOBCLOB test table.
 */
public static Test suite()
{
// GemStone changes BEGIN
    // We fail-fast on conflicts, so don't do lock timeouts
    return new BlobClobTestSetup(TestConfiguration.defaultSuite(ClobTest.class, false));
    // Original Derby setup (reduced lock timeouts) kept for reference:
    // return new BlobClobTestSetup(
    //     // Reduce lock timeouts so lock test case does not take too long
    //     DatabasePropertyTestSetup.setLockTimeouts(
    //         TestConfiguration.defaultSuite(ClobTest.class, false),
    //         2,
    //         4));
// GemStone changes END
}
/** SQLState reported when a lock wait times out. */
private static final String LOCK_TIMEOUT = "40XL1";
} // End class ClobTest
| |
/*
* Copyright 1997-2017 Optimatika (www.optimatika.se)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.algo.function;
import static org.algo.constant.BigMath.*;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import org.algo.type.context.NumberContext;
/**
 * Only the reference type parameter (BigDecimal) methods are actually implemented. The methods with the
 * primitive parameters (double) should create a BigDecimal and then delegate to the primitive methods (and do
 * nothing else). If possible the implementations should be pure BigDecimal arithmetic without rounding. If
 * rounding is necessary MathContext.DECIMAL128 should be used. If BigDecimal arithmetic is not possible at
 * all the implementation should delegate to PrimitiveFunction.
 *
 * @author apete
 */
public final class BigFunction extends FunctionSet<BigDecimal> {

    /**
     * A {@link BinaryFunction} on BigDecimal; the primitive (double)
     * overload boxes both arguments, delegates, and unboxes the result.
     */
    @FunctionalInterface
    public static interface Binary extends BinaryFunction<BigDecimal> {

        default double invoke(final double arg1, final double arg2) {
            return this.invoke(BigDecimal.valueOf(arg1), BigDecimal.valueOf(arg2)).doubleValue();
        }

    }

    /**
     * A {@link ParameterFunction} on BigDecimal; the primitive (double)
     * overload boxes the argument, delegates, and unboxes the result.
     */
    @FunctionalInterface
    public static interface Parameter extends ParameterFunction<BigDecimal> {

        default double invoke(final double arg, final int param) {
            return this.invoke(BigDecimal.valueOf(arg), param).doubleValue();
        }

    }

    /**
     * A {@link UnaryFunction} on BigDecimal; the primitive (double)
     * overload boxes the argument, delegates, and unboxes the result.
     */
    @FunctionalInterface
    public static interface Unary extends UnaryFunction<BigDecimal> {

        default double invoke(final double arg) {
            return this.invoke(BigDecimal.valueOf(arg)).doubleValue();
        }

    }

    /** Absolute value — exact BigDecimal arithmetic. */
    public static final Unary ABS = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return arg.abs();
        }

    };

    // The (inverse) trigonometric / hyperbolic functions below have no
    // exact BigDecimal form; they round-trip through double precision by
    // delegating to PrimitiveFunction.
    public static final Unary ACOS = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.ACOS.invoke(arg.doubleValue()));
        }

    };

    public static final Unary ACOSH = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.ACOSH.invoke(arg.doubleValue()));
        }

    };

    /** Addition — exact BigDecimal arithmetic. */
    public static final Binary ADD = new Binary() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg1, final BigDecimal arg2) {
            return arg1.add(arg2);
        }

    };

    public static final Unary ASIN = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.ASIN.invoke(arg.doubleValue()));
        }

    };

    public static final Unary ASINH = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.ASINH.invoke(arg.doubleValue()));
        }

    };

    public static final Unary ATAN = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.ATAN.invoke(arg.doubleValue()));
        }

    };

    public static final Binary ATAN2 = new Binary() {

        public final BigDecimal invoke(final BigDecimal arg1, final BigDecimal arg2) {
            return BigDecimal.valueOf(PrimitiveFunction.ATAN2.invoke(arg1.doubleValue(), arg2.doubleValue()));
        }

    };

    public static final Unary ATANH = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.ATANH.invoke(arg.doubleValue()));
        }

    };

    /** 1 if the argument is nonzero, 0 otherwise. */
    public static final Unary CARDINALITY = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return arg.signum() == 0 ? ZERO : ONE;
        }

    };

    /** Cube root, implemented via {@link #ROOT} with param 3. */
    public static final Unary CBRT = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return ROOT.invoke(arg, 3);
        }

    };

    /** Smallest integer value >= arg — exact, via setScale/CEILING. */
    public static final Unary CEIL = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return arg.setScale(0, RoundingMode.CEILING);
        }

    };

    /** Identity — BigDecimal is real, so conjugation is a no-op. */
    public static final Unary CONJUGATE = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return arg;
        }

    };

    public static final Unary COS = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.COS.invoke(arg.doubleValue()));
        }

    };

    public static final Unary COSH = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.COSH.invoke(arg.doubleValue()));
        }

    };

    /** Division rounded with DECIMAL128 (exact division may not terminate). */
    public static final Binary DIVIDE = new Binary() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg1, final BigDecimal arg2) {
            return arg1.divide(arg2, CONTEXT);
        }

    };

    public static final Unary EXP = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.EXP.invoke(arg.doubleValue()));
        }

    };

    public static final Unary EXPM1 = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.EXPM1.invoke(arg.doubleValue()));
        }

    };

    /** Largest integer value <= arg — exact, via setScale/FLOOR. */
    public static final Unary FLOOR = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return arg.setScale(0, RoundingMode.FLOOR);
        }

    };

    /** sqrt(a^2 + b^2); BigDecimal squaring cannot overflow, unlike double. */
    public static final Binary HYPOT = new Binary() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg1, final BigDecimal arg2) {
            return SQRT.invoke(arg1.multiply(arg1).add(arg2.multiply(arg2)));
        }

    };

    /** Reciprocal, 1/arg, rounded with DECIMAL128 via {@link #DIVIDE}. */
    public static final Unary INVERT = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return DIVIDE.invoke(ONE, arg);
        }

    };

    public static final Unary LOG = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.LOG.invoke(arg.doubleValue()));
        }

    };

    public static final Unary LOG10 = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.LOG10.invoke(arg.doubleValue()));
        }

    };

    public static final Unary LOG1P = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.LOG1P.invoke(arg.doubleValue()));
        }

    };

    public static final Unary LOGISTIC = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.LOGISTIC.invoke(arg.doubleValue()));
        }

    };

    public static final Unary LOGIT = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.LOGIT.invoke(arg.doubleValue()));
        }

    };

    public static final Binary MAX = new Binary() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg1, final BigDecimal arg2) {
            return arg1.max(arg2);
        }

    };

    public static final Binary MIN = new Binary() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg1, final BigDecimal arg2) {
            return arg1.min(arg2);
        }

    };

    public static final Binary MULTIPLY = new Binary() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg1, final BigDecimal arg2) {
            return arg1.multiply(arg2);
        }

    };

    public static final Unary NEGATE = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return arg.negate();
        }

    };

    /**
     * arg1 ^ arg2. Special cases are checked in order: a zero base yields
     * ZERO (note: this makes 0^0 == 0 here), a zero exponent yields ONE, an
     * exponent of exactly ONE returns the base unchanged, and a negative
     * base is rejected. The general case goes through exp(log(arg1)*arg2),
     * where EXP and LOG delegate to double precision.
     */
    public static final Binary POW = new Binary() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg1, final BigDecimal arg2) {
            if (arg1.signum() == 0) {
                return ZERO;
            } else if (arg2.signum() == 0) {
                return ONE;
            } else if (arg2.compareTo(ONE) == 0) {
                return arg1;
            } else if (arg1.signum() == -1) {
                throw new IllegalArgumentException();
            } else {
                return EXP.invoke(LOG.invoke(arg1).multiply(arg2));
            }
        }

    };

    /** Integer power — exact, via BigDecimal.pow. */
    public static final Parameter POWER = new Parameter() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg, final int param) {
            return arg.pow(param);
        }

    };

    /** Round to the nearest integer using the DECIMAL128 rounding mode. */
    public static final Unary RINT = new Unary() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg) {
            return arg.setScale(0, CONTEXT.getRoundingMode());
        }

    };

    /**
     * param-th root. Rejects non-positive param; param 1 and 2 are handled
     * directly. Otherwise uses Newton-Raphson refinement in BigDecimal,
     * seeded with a double-precision estimate when the argument fits in a
     * finite double, iterating until retVal^param equals the DECIMAL128
     * rounding of the argument.
     */
    public static final Parameter ROOT = new Parameter() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg, final int param) {

            if (param <= 0) {
                throw new IllegalArgumentException();
            } else if (param == 1) {
                return arg;
            } else if (param == 2) {
                return SQRT.invoke(arg);
            } else {

                final BigDecimal tmpArg = arg.round(CONTEXT);
                final BigDecimal tmpParam = BigDecimal.valueOf(param);

                BigDecimal retVal = ZERO;
                final double tmpDoubleArg = arg.doubleValue();
                if (!Double.isInfinite(tmpDoubleArg) && !Double.isNaN(tmpDoubleArg)) {
                    retVal = BigDecimal.valueOf(PrimitiveFunction.ROOT.invoke(tmpDoubleArg, param)); // Initial guess
                }

                BigDecimal tmpShouldBeZero;
                while ((tmpShouldBeZero = retVal.pow(param, CONTEXT).subtract(tmpArg)).signum() != 0) {
                    retVal = retVal.subtract(tmpShouldBeZero.divide(tmpParam.multiply(retVal.pow(param - 1)), CONTEXT));
                }

                return retVal;
            }
        }

    };

    /** Set the decimal scale, rounding with the DECIMAL128 rounding mode. */
    public static final Parameter SCALE = new Parameter() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg, final int param) {
            return arg.setScale(param, CONTEXT.getRoundingMode());
        }

    };

    /** Sign: ONE, -ONE or ZERO depending on the argument's signum. */
    public static final Unary SIGNUM = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            switch (arg.signum()) {
            case 1:
                return ONE;
            case -1:
                return ONE.negate();
            default:
                return ZERO;
            }
        }

    };

    public static final Unary SIN = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.SIN.invoke(arg.doubleValue()));
        }

    };

    public static final Unary SINH = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.SINH.invoke(arg.doubleValue()));
        }

    };

    /**
     * Square root via Newton-Raphson (Babylonian) iteration in BigDecimal,
     * seeded with a double-precision estimate when the argument fits in a
     * finite double, iterating until retVal*retVal equals the DECIMAL128
     * rounding of the argument.
     */
    public static final Unary SQRT = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {

            final BigDecimal tmpArg = arg.round(CONTEXT);

            BigDecimal retVal = ZERO;
            final double tmpDoubleArg = arg.doubleValue();
            if (!Double.isInfinite(tmpDoubleArg) && !Double.isNaN(tmpDoubleArg)) {
                retVal = BigDecimal.valueOf(PrimitiveFunction.SQRT.invoke(tmpDoubleArg)); // Initial guess
            }

            BigDecimal tmpShouldBeZero;
            while ((tmpShouldBeZero = retVal.multiply(retVal, CONTEXT).subtract(tmpArg)).signum() != 0) {
                retVal = retVal.subtract(tmpShouldBeZero.divide(TWO.multiply(retVal), CONTEXT));
            }

            return retVal;
        }

    };

    /** sqrt(1 + arg^2), computed with exact squaring then {@link #SQRT}. */
    public static final Unary SQRT1PX2 = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return SQRT.invoke(ONE.add(arg.multiply(arg)));
        }

    };

    public static final Binary SUBTRACT = new Binary() {

        @Override
        public final BigDecimal invoke(final BigDecimal arg1, final BigDecimal arg2) {
            return arg1.subtract(arg2);
        }

    };

    public static final Unary TAN = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.TAN.invoke(arg.doubleValue()));
        }

    };

    public static final Unary TANH = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return BigDecimal.valueOf(PrimitiveFunction.TANH.invoke(arg.doubleValue()));
        }

    };

    /** Identity function. */
    public static final Unary VALUE = new Unary() {

        public final BigDecimal invoke(final BigDecimal arg) {
            return arg;
        }

    };

    // Shared rounding context for all inexact operations in this class.
    private static final MathContext CONTEXT = MathContext.DECIMAL128;

    // The one shared instance returned by getSet().
    private static final BigFunction SET = new BigFunction();

    /** Returns the singleton BigDecimal function set. */
    public static BigFunction getSet() {
        return SET;
    }

    private BigFunction() {
        super();
    }

    // FunctionSet accessors — each returns the corresponding shared
    // singleton function defined above.

    @Override
    public UnaryFunction<BigDecimal> abs() {
        return ABS;
    }

    @Override
    public UnaryFunction<BigDecimal> acos() {
        return ACOS;
    }

    @Override
    public UnaryFunction<BigDecimal> acosh() {
        return ACOSH;
    }

    @Override
    public BinaryFunction<BigDecimal> add() {
        return ADD;
    }

    @Override
    public UnaryFunction<BigDecimal> asin() {
        return ASIN;
    }

    @Override
    public UnaryFunction<BigDecimal> asinh() {
        return ASINH;
    }

    @Override
    public UnaryFunction<BigDecimal> atan() {
        return ATAN;
    }

    @Override
    public BinaryFunction<BigDecimal> atan2() {
        return ATAN2;
    }

    @Override
    public UnaryFunction<BigDecimal> atanh() {
        return ATANH;
    }

    @Override
    public UnaryFunction<BigDecimal> cardinality() {
        return CARDINALITY;
    }

    @Override
    public UnaryFunction<BigDecimal> cbrt() {
        return CBRT;
    }

    @Override
    public UnaryFunction<BigDecimal> ceil() {
        return CEIL;
    }

    @Override
    public UnaryFunction<BigDecimal> conjugate() {
        return CONJUGATE;
    }

    @Override
    public UnaryFunction<BigDecimal> cos() {
        return COS;
    }

    @Override
    public UnaryFunction<BigDecimal> cosh() {
        return COSH;
    }

    @Override
    public BinaryFunction<BigDecimal> divide() {
        return DIVIDE;
    }

    /** Returns a function that enforces (rounds to) the given context. */
    @Override
    public Unary enforce(final NumberContext context) {
        return t -> context.enforce(t);
    }

    @Override
    public UnaryFunction<BigDecimal> exp() {
        return EXP;
    }

    @Override
    public UnaryFunction<BigDecimal> expm1() {
        return EXPM1;
    }

    @Override
    public UnaryFunction<BigDecimal> floor() {
        return FLOOR;
    }

    @Override
    public BinaryFunction<BigDecimal> hypot() {
        return HYPOT;
    }

    @Override
    public UnaryFunction<BigDecimal> invert() {
        return INVERT;
    }

    @Override
    public UnaryFunction<BigDecimal> log() {
        return LOG;
    }

    @Override
    public UnaryFunction<BigDecimal> log10() {
        return LOG10;
    }

    @Override
    public UnaryFunction<BigDecimal> log1p() {
        return LOG1P;
    }

    @Override
    public UnaryFunction<BigDecimal> logistic() {
        return LOGISTIC;
    }

    @Override
    public UnaryFunction<BigDecimal> logit() {
        return LOGIT;
    }

    @Override
    public BinaryFunction<BigDecimal> max() {
        return MAX;
    }

    @Override
    public BinaryFunction<BigDecimal> min() {
        return MIN;
    }

    @Override
    public BinaryFunction<BigDecimal> multiply() {
        return MULTIPLY;
    }

    @Override
    public UnaryFunction<BigDecimal> negate() {
        return NEGATE;
    }

    @Override
    public BinaryFunction<BigDecimal> pow() {
        return POW;
    }

    @Override
    public ParameterFunction<BigDecimal> power() {
        return POWER;
    }

    @Override
    public UnaryFunction<BigDecimal> rint() {
        return RINT;
    }

    @Override
    public ParameterFunction<BigDecimal> root() {
        return ROOT;
    }

    @Override
    public ParameterFunction<BigDecimal> scale() {
        return SCALE;
    }

    @Override
    public UnaryFunction<BigDecimal> signum() {
        return SIGNUM;
    }

    @Override
    public UnaryFunction<BigDecimal> sin() {
        return SIN;
    }

    @Override
    public UnaryFunction<BigDecimal> sinh() {
        return SINH;
    }

    @Override
    public UnaryFunction<BigDecimal> sqrt() {
        return SQRT;
    }

    @Override
    public UnaryFunction<BigDecimal> sqrt1px2() {
        return SQRT1PX2;
    }

    @Override
    public BinaryFunction<BigDecimal> subtract() {
        return SUBTRACT;
    }

    @Override
    public UnaryFunction<BigDecimal> tan() {
        return TAN;
    }

    @Override
    public UnaryFunction<BigDecimal> tanh() {
        return TANH;
    }

    @Override
    public UnaryFunction<BigDecimal> value() {
        return VALUE;
    }

}
| |
package org.fiteagle.interactors.sfa.util;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.util.GregorianCalendar;
import java.util.UUID;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.XMLGregorianCalendar;
import org.fiteagle.core.aaa.CertificateAuthority;
import org.fiteagle.core.aaa.KeyStoreManagement;
import org.fiteagle.core.aaa.KeyStoreManagement.CertificateNotFoundException;
import org.fiteagle.core.aaa.x509.X509Util;
import org.fiteagle.core.config.InterfaceConfiguration;
import org.fiteagle.core.groupmanagement.Group;
import org.fiteagle.core.groupmanagement.GroupDBManager;
import org.fiteagle.core.util.URN;
import org.fiteagle.interactors.sfa.getSelfCredential.jaxbClasses.Credential;
import org.fiteagle.interactors.sfa.getSelfCredential.jaxbClasses.Privilege;
import org.fiteagle.interactors.sfa.getSelfCredential.jaxbClasses.Privileges;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Builds SFA "privilege" {@link Credential} objects for a user certificate
 * and a target URN (a slice or an authority).
 * <p>
 * Collaborators ({@link GroupDBManager}, {@link KeyStoreManagement}) must be
 * injected via the setters before {@link #getCredential()} is called.
 */
public class CredentialFactoryWorker {

    private Credential credential;
    private X509Certificate userCertificate;
    URN target;
    private X509Certificate targetCertificate;
    private Logger log = LoggerFactory.getLogger(this.getClass());
    private GroupDBManager groupManager;
    private KeyStoreManagement keyStoreManagement;

    /**
     * @param credentialCertificate certificate of the requesting user
     * @param target requested target; a "user" target is mapped to the
     *            slice-authority URN, a "slice" target is kept as-is
     */
    public CredentialFactoryWorker(
            X509Certificate credentialCertificate, URN target) {
        this.userCertificate = credentialCertificate;
        this.target = getTargetURN(target);
    }

    /** Resolves and caches the target certificate, wrapping any failure. */
    private void setTargetCertificate() {
        try {
            targetCertificate = getTargetCertificate();
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Looks up the certificate for the target. For a slice target, a missing
     * resource certificate is created via the CA and stored; for an
     * authority target the slice-authority certificate is returned.
     *
     * @throws RuntimeException if the target type is neither slice nor
     *             authority
     */
    private X509Certificate getTargetCertificate() throws Exception {
        if (target.getType().equalsIgnoreCase("slice")) {
            Group group = groupManager.getGroup(target.getSubjectAtDomain());
            try {
                return keyStoreManagement.getResourceCertificate(target
                        .getSubjectAtDomain());
            } catch (CertificateNotFoundException e) {
                // No stored certificate for this slice yet: create one for
                // the group and persist it for subsequent calls.
                X509Certificate groupCertificate = CertificateAuthority
                        .getInstance().createCertificate(group);
                keyStoreManagement.storeResourceCertificate(groupCertificate);
                return groupCertificate;
            }
        }
        if (target.getType().equalsIgnoreCase("authority")) {
            return keyStoreManagement.getSliceAuthorityCert();
        }
        // FIX: include a message instead of throwing a bare RuntimeException.
        throw new RuntimeException(
                "Unsupported target type: " + target.getType());
    }

    /** Assigns a random UUID as the credential id. */
    private void setId() {
        credential.setId(UUID.randomUUID().toString());
    }

    /** SFA credentials produced here are always of type "privilege". */
    private void setType() {
        credential.setType("privilege");
    }

    /**
     * Encodes the owner GID from the user certificate. A self-signed user
     * certificate is first re-issued by the local CA, and the re-issued
     * certificate replaces {@link #userCertificate} for later steps.
     */
    private void setOwnerGID() {
        X509Certificate returnCert = userCertificate;
        CertificateAuthority ca = CertificateAuthority.getInstance();
        if (X509Util.isSelfSigned(userCertificate)) {
            try {
                returnCert = ca.createCertificate(userCertificate);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
                // FIX: preserve the cause instead of dropping it.
                throw new RuntimeException(e.getMessage(), e);
            }
        }
        String returnString;
        try {
            returnString = X509Util.getCertificateBodyEncoded(returnCert);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            // FIX: preserve the cause instead of dropping it.
            throw new RuntimeException(e.getMessage(), e);
        }
        userCertificate = returnCert;
        credential.setOwnerGid(returnString);
    }

    /** Sets the owner URN extracted from the user certificate. */
    private void setOwnerURN() {
        URN urn = getSubjectUrn();
        credential.setOwnerURN(urn.toString());
    }

    /** Extracts the subject URN from the user certificate. */
    private URN getSubjectUrn() {
        try {
            return X509Util.getURN(userCertificate);
        } catch (CertificateParsingException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /** Encodes the (already resolved) target certificate as the target GID. */
    private void setTargetGID() {
        try {
            credential.setTargetGid(X509Util
                    .getCertificateBodyEncoded(targetCertificate));
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    private void setTargetURN() {
        credential.setTargetURN(target.toString());
    }

    /**
     * Sets the expiry timestamp.
     * NOTE(review): lifetime is hard-coded to 100000 ms (~100 s) from now —
     * confirm this short lifetime is intended.
     */
    private void setExpirationDate() {
        GregorianCalendar gregCalendar = new GregorianCalendar();
        gregCalendar
                .setTimeInMillis(java.lang.System.currentTimeMillis() + 100000);
        XMLGregorianCalendar expirationDate = null;
        try {
            expirationDate = DatatypeFactory.newInstance()
                    .newXMLGregorianCalendar(gregCalendar);
        } catch (DatatypeConfigurationException e) {
            throw new RuntimeException(e);
        }
        credential.setExpires(expirationDate);
    }

    /** Grants a single non-delegatable wildcard ("*") privilege. */
    private void setPrivileges() {
        Privileges privileges = new Privileges();
        Privilege userPriv = new Privilege();
        userPriv.setCanDelegate(false);
        userPriv.setName("*");
        privileges.getPrivilege().add(userPriv);
        credential.setPrivileges(privileges);
    }

    /**
     * Maps the requested target to the effective target URN: "user" targets
     * resolve to the slice-authority URN, "slice" targets are used as-is.
     *
     * @throws UnsupportedTarget for any other target type
     */
    private URN getTargetURN(URN target) {
        URN urn = null;
        if (target.getType().equalsIgnoreCase("user")) {
            urn = getSliceAuthorityURN();
        } else if (target.getType().equalsIgnoreCase("Slice")) {
            urn = target;
        } else {
            throw new UnsupportedTarget();
        }
        return urn;
    }

    private URN getSliceAuthorityURN() {
        return new URN(InterfaceConfiguration.getInstance().getSA_URN());
    }

    /** Thrown when the requested target type cannot be handled. */
    public class UnsupportedTarget extends RuntimeException {
        private static final long serialVersionUID = -7821229625163019933L;
    }

    public void setGroupManager(GroupDBManager groupManager) {
        this.groupManager = groupManager;
    }

    public void setKeyStoreManager(KeyStoreManagement keyStoreManagement) {
        this.keyStoreManagement = keyStoreManagement;
    }

    /**
     * Assembles and returns a fresh {@link Credential}; each call produces a
     * new credential with a new id and expiry.
     */
    public Credential getCredential() {
        credential = new Credential();
        setTargetCertificate();
        setId();
        setType();
        setOwnerGID();
        setOwnerURN();
        setTargetGID();
        setTargetURN();
        setExpirationDate();
        setPrivileges();
        return credential;
    }

}
| |
package quickfix.fixt11;
import quickfix.FieldNotFound;
import quickfix.Group;
public class Logon extends Message
{
static final long serialVersionUID = 20050617;
/** FIX message type for Logon (tag 35 = "A"). */
public static final String MSGTYPE = "A";

/** Creates an empty Logon message with MsgType(35)=A preset in the header. */
public Logon()
{
    super();
    getHeader().setField(new quickfix.field.MsgType(MSGTYPE));
}

/** Creates a Logon message pre-populated with the three required fields. */
public Logon(quickfix.field.EncryptMethod encryptMethod, quickfix.field.HeartBtInt heartBtInt, quickfix.field.DefaultApplVerID defaultApplVerID) {
    this();
    setField(encryptMethod);
    setField(heartBtInt);
    setField(defaultApplVerID);
}
// --- EncryptMethod (tag 98) accessors, generated QuickFIX/J style ---

/** Sets EncryptMethod (98). */
public void set(quickfix.field.EncryptMethod value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.EncryptMethod get(quickfix.field.EncryptMethod value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns EncryptMethod (98) as a new field object. */
public quickfix.field.EncryptMethod getEncryptMethod() throws FieldNotFound
{
    quickfix.field.EncryptMethod value = new quickfix.field.EncryptMethod();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.EncryptMethod field)
{
    return isSetField(field);
}

/** True if tag 98 is present in this message. */
public boolean isSetEncryptMethod()
{
    return isSetField(98);
}
// --- HeartBtInt (tag 108) accessors, generated QuickFIX/J style ---

/** Sets HeartBtInt (108). */
public void set(quickfix.field.HeartBtInt value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.HeartBtInt get(quickfix.field.HeartBtInt value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns HeartBtInt (108) as a new field object. */
public quickfix.field.HeartBtInt getHeartBtInt() throws FieldNotFound
{
    quickfix.field.HeartBtInt value = new quickfix.field.HeartBtInt();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.HeartBtInt field)
{
    return isSetField(field);
}

/** True if tag 108 is present in this message. */
public boolean isSetHeartBtInt()
{
    return isSetField(108);
}
// --- RawDataLength (tag 95) accessors, generated QuickFIX/J style ---

/** Sets RawDataLength (95). */
public void set(quickfix.field.RawDataLength value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.RawDataLength get(quickfix.field.RawDataLength value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns RawDataLength (95) as a new field object. */
public quickfix.field.RawDataLength getRawDataLength() throws FieldNotFound
{
    quickfix.field.RawDataLength value = new quickfix.field.RawDataLength();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.RawDataLength field)
{
    return isSetField(field);
}

/** True if tag 95 is present in this message. */
public boolean isSetRawDataLength()
{
    return isSetField(95);
}
// --- RawData (tag 96) accessors, generated QuickFIX/J style ---

/** Sets RawData (96). */
public void set(quickfix.field.RawData value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.RawData get(quickfix.field.RawData value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns RawData (96) as a new field object. */
public quickfix.field.RawData getRawData() throws FieldNotFound
{
    quickfix.field.RawData value = new quickfix.field.RawData();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.RawData field)
{
    return isSetField(field);
}

/** True if tag 96 is present in this message. */
public boolean isSetRawData()
{
    return isSetField(96);
}
// --- ResetSeqNumFlag (tag 141) accessors, generated QuickFIX/J style ---

/** Sets ResetSeqNumFlag (141). */
public void set(quickfix.field.ResetSeqNumFlag value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.ResetSeqNumFlag get(quickfix.field.ResetSeqNumFlag value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns ResetSeqNumFlag (141) as a new field object. */
public quickfix.field.ResetSeqNumFlag getResetSeqNumFlag() throws FieldNotFound
{
    quickfix.field.ResetSeqNumFlag value = new quickfix.field.ResetSeqNumFlag();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.ResetSeqNumFlag field)
{
    return isSetField(field);
}

/** True if tag 141 is present in this message. */
public boolean isSetResetSeqNumFlag()
{
    return isSetField(141);
}
// --- NextExpectedMsgSeqNum (tag 789) accessors, generated QuickFIX/J style ---

/** Sets NextExpectedMsgSeqNum (789). */
public void set(quickfix.field.NextExpectedMsgSeqNum value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.NextExpectedMsgSeqNum get(quickfix.field.NextExpectedMsgSeqNum value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns NextExpectedMsgSeqNum (789) as a new field object. */
public quickfix.field.NextExpectedMsgSeqNum getNextExpectedMsgSeqNum() throws FieldNotFound
{
    quickfix.field.NextExpectedMsgSeqNum value = new quickfix.field.NextExpectedMsgSeqNum();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.NextExpectedMsgSeqNum field)
{
    return isSetField(field);
}

/** True if tag 789 is present in this message. */
public boolean isSetNextExpectedMsgSeqNum()
{
    return isSetField(789);
}
// --- MaxMessageSize (tag 383) accessors, generated QuickFIX/J style ---

/** Sets MaxMessageSize (383). */
public void set(quickfix.field.MaxMessageSize value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.MaxMessageSize get(quickfix.field.MaxMessageSize value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns MaxMessageSize (383) as a new field object. */
public quickfix.field.MaxMessageSize getMaxMessageSize() throws FieldNotFound
{
    quickfix.field.MaxMessageSize value = new quickfix.field.MaxMessageSize();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.MaxMessageSize field)
{
    return isSetField(field);
}

/** True if tag 383 is present in this message. */
public boolean isSetMaxMessageSize()
{
    return isSetField(383);
}
// --- TestMessageIndicator (tag 464) accessors, generated QuickFIX/J style ---

/** Sets TestMessageIndicator (464). */
public void set(quickfix.field.TestMessageIndicator value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.TestMessageIndicator get(quickfix.field.TestMessageIndicator value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns TestMessageIndicator (464) as a new field object. */
public quickfix.field.TestMessageIndicator getTestMessageIndicator() throws FieldNotFound
{
    quickfix.field.TestMessageIndicator value = new quickfix.field.TestMessageIndicator();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.TestMessageIndicator field)
{
    return isSetField(field);
}

/** True if tag 464 is present in this message. */
public boolean isSetTestMessageIndicator()
{
    return isSetField(464);
}
// --- Username (tag 553) accessors, generated QuickFIX/J style ---

/** Sets Username (553). */
public void set(quickfix.field.Username value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.Username get(quickfix.field.Username value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns Username (553) as a new field object. */
public quickfix.field.Username getUsername() throws FieldNotFound
{
    quickfix.field.Username value = new quickfix.field.Username();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.Username field)
{
    return isSetField(field);
}

/** True if tag 553 is present in this message. */
public boolean isSetUsername()
{
    return isSetField(553);
}
// --- Password (tag 554) accessors, generated QuickFIX/J style ---

/** Sets Password (554). */
public void set(quickfix.field.Password value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.Password get(quickfix.field.Password value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns Password (554) as a new field object. */
public quickfix.field.Password getPassword() throws FieldNotFound
{
    quickfix.field.Password value = new quickfix.field.Password();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.Password field)
{
    return isSetField(field);
}

/** True if tag 554 is present in this message. */
public boolean isSetPassword()
{
    return isSetField(554);
}
// --- DefaultApplVerID (tag 1137) accessors, generated QuickFIX/J style ---

/** Sets DefaultApplVerID (1137). */
public void set(quickfix.field.DefaultApplVerID value)
{
    setField(value);
}

/** Fills the supplied field object from this message and returns it. */
public quickfix.field.DefaultApplVerID get(quickfix.field.DefaultApplVerID value) throws FieldNotFound
{
    getField(value);
    return value;
}

/** Returns DefaultApplVerID (1137) as a new field object. */
public quickfix.field.DefaultApplVerID getDefaultApplVerID() throws FieldNotFound
{
    quickfix.field.DefaultApplVerID value = new quickfix.field.DefaultApplVerID();
    getField(value);
    return value;
}

/** True if the given field's tag is present in this message. */
public boolean isSet(quickfix.field.DefaultApplVerID field)
{
    return isSetField(field);
}

/** True if tag 1137 is present in this message. */
public boolean isSetDefaultApplVerID()
{
    return isSetField(1137);
}
// --- MsgTypeGrp component accessors: copy the component's fields into/out of this message. ---
public void set(quickfix.fixt11.component.MsgTypeGrp component)
{
setComponent(component);
}
// Populates the caller-supplied component in place and returns it.
public quickfix.fixt11.component.MsgTypeGrp get(quickfix.fixt11.component.MsgTypeGrp component) throws FieldNotFound
{
getComponent(component);
return component;
}
public quickfix.fixt11.component.MsgTypeGrp getMsgTypeGrp() throws FieldNotFound
{
quickfix.fixt11.component.MsgTypeGrp component = new quickfix.fixt11.component.MsgTypeGrp();
getComponent(component);
return component;
}
// --- NoMsgTypes (FIX tag 384, repeating-group count) accessors. ---
public void set(quickfix.field.NoMsgTypes value)
{
setField(value);
}
// Populates the caller-supplied field in place and returns it; throws FieldNotFound when absent.
public quickfix.field.NoMsgTypes get(quickfix.field.NoMsgTypes value) throws FieldNotFound
{
getField(value);
return value;
}
public quickfix.field.NoMsgTypes getNoMsgTypes() throws FieldNotFound
{
quickfix.field.NoMsgTypes value = new quickfix.field.NoMsgTypes();
getField(value);
return value;
}
public boolean isSet(quickfix.field.NoMsgTypes field)
{
return isSetField(field);
}
// True when tag 384 is present.
public boolean isSetNoMsgTypes()
{
return isSetField(384);
}
// One entry of the NoMsgTypes (384) repeating group: counter tag 384, delimiter
// tag 372 (RefMsgType), member tags listed in field order (0-terminated).
public static class NoMsgTypes extends Group {
static final long serialVersionUID = 20050617;
public NoMsgTypes() {
super(384, 372,
new int[] {372, 385, 1130, 1131, 0 } );
}
// --- RefMsgType (tag 372) accessors. ---
public void set(quickfix.field.RefMsgType value)
{
setField(value);
}
public quickfix.field.RefMsgType get(quickfix.field.RefMsgType value) throws FieldNotFound
{
getField(value);
return value;
}
public quickfix.field.RefMsgType getRefMsgType() throws FieldNotFound
{
quickfix.field.RefMsgType value = new quickfix.field.RefMsgType();
getField(value);
return value;
}
public boolean isSet(quickfix.field.RefMsgType field)
{
return isSetField(field);
}
public boolean isSetRefMsgType()
{
return isSetField(372);
}
// --- MsgDirection (tag 385) accessors. ---
public void set(quickfix.field.MsgDirection value)
{
setField(value);
}
public quickfix.field.MsgDirection get(quickfix.field.MsgDirection value) throws FieldNotFound
{
getField(value);
return value;
}
public quickfix.field.MsgDirection getMsgDirection() throws FieldNotFound
{
quickfix.field.MsgDirection value = new quickfix.field.MsgDirection();
getField(value);
return value;
}
public boolean isSet(quickfix.field.MsgDirection field)
{
return isSetField(field);
}
public boolean isSetMsgDirection()
{
return isSetField(385);
}
// --- RefApplVerID (tag 1130) accessors. ---
public void set(quickfix.field.RefApplVerID value)
{
setField(value);
}
public quickfix.field.RefApplVerID get(quickfix.field.RefApplVerID value) throws FieldNotFound
{
getField(value);
return value;
}
public quickfix.field.RefApplVerID getRefApplVerID() throws FieldNotFound
{
quickfix.field.RefApplVerID value = new quickfix.field.RefApplVerID();
getField(value);
return value;
}
public boolean isSet(quickfix.field.RefApplVerID field)
{
return isSetField(field);
}
public boolean isSetRefApplVerID()
{
return isSetField(1130);
}
// --- RefCstmApplVerID (tag 1131) accessors. ---
public void set(quickfix.field.RefCstmApplVerID value)
{
setField(value);
}
public quickfix.field.RefCstmApplVerID get(quickfix.field.RefCstmApplVerID value) throws FieldNotFound
{
getField(value);
return value;
}
public quickfix.field.RefCstmApplVerID getRefCstmApplVerID() throws FieldNotFound
{
quickfix.field.RefCstmApplVerID value = new quickfix.field.RefCstmApplVerID();
getField(value);
return value;
}
public boolean isSet(quickfix.field.RefCstmApplVerID field)
{
return isSetField(field);
}
public boolean isSetRefCstmApplVerID()
{
return isSetField(1131);
}
}
}
| |
package cc.hrLDA.topics;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.io.*;
import cc.hrLDA.topics.HierarchicalRLDA.NCRPNode;
import cc.mallet.types.*;
import cc.mallet.util.Randoms;
import cc.mallet.util.MySqlDB;
import gnu.trove.*;
import rita.wordnet.RiWordnet;
/**
 * Hierarchical relation-based LDA sampler built around a nested-CRP topic tree.
 * Documents' phrases are routed down the tree level by level (tables), then
 * Gibbs-sampled among sibling leaves (topics); perplexity is tracked per level.
 */
public class HierarchicalRLDA {
InstanceList instances; // training documents; each datum is a FeatureSequence
InstanceList testing; // held-out documents (stored at initialize time)
NCRPNode rootNode; // root of the topic tree
int numLevels; // target depth of the tree
int numOfThreads; // requested parallelism (stored only in the code visible here)
int numDocuments;
int numTypes; // vocabulary size (data alphabet size)
double alpha; // smoothing on topic distributions
double gamma; // "imaginary" customers at the next, as yet unused table
double eta; // smoothing on word distributions
double etaSum; // eta * numTypes, precomputed in initialize()
HashMap <Integer, HashSet<NCRPNode>> documentLeaves; // per-document set of leaves holding its phrases
RiWordnet wordnet = new RiWordnet(null); // WordNet handle (unused in the code visible here)
String end="";
MySqlDB mySqlDB; // relation-table access, configured in initialize()
int[][] tdm; // type-by-document count matrix, filled during initialize()
int currentLevel = 0; // level currently being grown/sampled
int totalNodes = 0;// total num of NCRP nodes ever created; source of NCRPNode.nodeID
int numOfPhrases = 0; // raw phrases seen at level 0
int numOfPseudoPhrases = 0; // phrases counted again each time they are pushed a level deeper
Randoms random;
boolean showProgress = true;
int displayTopicsInterval = 50;
int numWordsToDisplay = 5;
String stateFilePath = "hslda.state";
String perplexityFilePath = "hSLDA.txt";
File perplexityFile;// per-iteration perplexity log
BufferedWriter perplexityBufferedWriter;
File stateFile;// final node/topic dump
BufferedWriter stateBufferedWriter;
// Default hyper-parameters; override via the setters before initialize().
public HierarchicalRLDA () {
alpha = 10.0;
gamma = 1.0;
eta = 0.1;
}
// --- Hyper-parameter and output configuration; call before initialize(). ---
public void setAlpha(double alpha) {
this.alpha = alpha;
}
public void setGamma(double gamma) {
this.gamma = gamma;
}
public void setEta(double eta) {
this.eta = eta;
}
public void setStateFilePath(String stateFilePath) {
this.stateFilePath = stateFilePath;
}
public void setPerplexityFilePath(String perplexityFilePath) {
this.perplexityFilePath = perplexityFilePath;
}
// How often topics are reported, and how many words to show per topic.
public void setTopicDisplay(int interval, int words) {
displayTopicsInterval = interval;
numWordsToDisplay = words;
}
public void setEnd(String end){
this.end = end;
}
/**
 * Controls whether the sampler reports progress by printing a character
 * after every iteration.
 */
public void setProgressDisplay(boolean showProgress) {
this.showProgress = showProgress;
}
// Number of raw phrases observed during initialize().
public int getNumOfPhrases() {
return numOfPhrases;
}
/**
 * Wires the sampler to its corpus and supporting services, opens the output
 * files, and builds the initial one-level topic tree by routing every phrase
 * from the root through the nested-CRP path selector.
 * Exits the JVM (status 2) on an empty corpus or empty vocabulary.
 *
 * @param instances             training documents; each datum must be a FeatureSequence
 * @param testing               held-out documents (stored, not used here)
 * @param numLevels             depth of the topic tree to grow
 * @param numOfThreads          requested parallelism (stored only)
 * @param random                RNG used for all table/topic draws
 * @param databaseName          MySQL database for the relation tables
 * @param userName              MySQL user
 * @param password              MySQL password
 * @param tableName             relation table name
 * @param tempRelationTableName scratch relation table name
 * @throws IllegalArgumentException when the instance data is not a FeatureSequence
 */
public void initialize(InstanceList instances, InstanceList testing,
int numLevels, int numOfThreads, Randoms random,
String databaseName,String userName,String password,
String tableName, String tempRelationTableName) {
this.instances = instances;
this.testing = testing;
this.numLevels = numLevels;
this.numOfThreads = numOfThreads;
this.random = random;
this.mySqlDB = new MySqlDB(databaseName, userName, password, tableName, tempRelationTableName);
if (instances.size() == 0){
System.err.println("Input data is empty");
System.exit(2);
}
if (! (instances.get(0).getData() instanceof FeatureSequence)) {
// Message typo fixed: "impoting" -> "importing".
throw new IllegalArgumentException("Input must be a FeatureSequence, using the --"
+ "feature-sequence option when importing data, for example");
}
try {
perplexityFile = new File (perplexityFilePath);
stateFile = new File (stateFilePath);
perplexityBufferedWriter = new BufferedWriter(new PrintWriter(perplexityFile));
stateBufferedWriter = new BufferedWriter(new PrintWriter(stateFile));
} catch (FileNotFoundException e) {
// NOTE(review): the writers stay null after this and later writes will NPE — confirm desired.
System.err.println("Unable to create stateFile and perplexityFile");
}
numDocuments = instances.size();
numTypes = instances.getDataAlphabet().size();
if (numTypes <= 0){
System.err.println("no data for the seeds");
System.exit(2);
}
etaSum = eta * numTypes;
rootNode = new NCRPNode(numTypes);// root node over all word types
NCRPNode node = null;
tdm = new int[numTypes][numDocuments];
documentLeaves = new HashMap <Integer, HashSet<NCRPNode>> ();
// Level 0: route every phrase from the root and remember, per document,
// the set of leaves its phrases landed on.
currentLevel = 0;
for (int doc = 0; doc < numDocuments; doc++) {
FeatureSequence fs = (FeatureSequence) instances.get(doc).getData();
int seqLen = fs.getLength();
System.out.println("doc: "+doc+": "+instances.get(doc).getName().toString());
HashSet <NCRPNode> currentDocLeaves = new HashSet <NCRPNode>();
for (int token = 0; token < seqLen; token++) {
int type = fs.getIndexAtPosition(token);
double weight = fs.getWeightAtPosition(token);
int chunkID = fs.getChunkIDAtPosition (token);
int sentenceID = fs.getSentenceIDAtPosition(token);
Phrase phrase = new Phrase(type, weight, chunkID, sentenceID, doc);
numOfPhrases++;
numOfPseudoPhrases++;
tdm[type][doc]++;
NCRPNode parentNode = rootNode;
node = parentNode.selectPath(phrase);
node.totalTokens++;
node.typeCounts[type]++;
node.hashSet.add(phrase);
// update doc leaves
currentDocLeaves.add(node);
}
documentLeaves.put(doc, currentDocLeaves);
System.out.println("END OF DOCUMENT "+doc+" ----------------------------------------------------------------");
System.out.println();
}
}
/**
 * Runs the full sampler: grows the tree one level at a time; per level it
 * (re)assigns and resamples tables, then Gibbs-samples topics for
 * numIterations sweeps, accumulating per-level perplexity contributions.
 * Combined perplexity is reported/written only once the deepest level runs.
 */
public void estimate(int numIterations) {
double [][] perplexityOfParentLevles = new double [numIterations+1][numLevels+1];
while (currentLevel < numLevels){
currentLevel++;
System.out.println("currentLevel: "+currentLevel);
// Table assignment runs exactly once per level, from level 2 onward
// (level-1 tables were created by initialize()).
if (currentLevel > 1)
assignTableForAllDocs();
//sample tables
sampleTableForAllDocs(numIterations);
try {
perplexityBufferedWriter.write("numOfPseudoPhrases, "+numOfPseudoPhrases+"\n");
} catch (IOException e1) {
System.err.println("Unable to write numOfPseudoPhrases");
}
//sample topics
for (int iteration = 1; iteration <= numIterations; iteration++) {
for (int doc=0; doc < numDocuments; doc++) {
sampleTopicForOneDoc(doc);
}// end of for (int doc=0; doc < numDocuments; doc++) {
// Deepest level: combine leaf-level perplexity with this iteration's
// stored parent-level contributions, then report/log it.
if (currentLevel == numLevels){
try {
double perplexityOnLeafLevel = printPerplexityState(currentLevel);
double perplexity = perplexityOnLeafLevel;
double [] perplexityOfParentInAllLevles = perplexityOfParentLevles [iteration];
for (double pp : perplexityOfParentInAllLevles){
perplexity += pp;
}
if (iteration < displayTopicsInterval || iteration % displayTopicsInterval == 0) {
// Normalized by numOfPseudoPhrases (not numOfPhrases) per the author's note.
System.out.println("perplexity: "+perplexity/numOfPseudoPhrases);
perplexityBufferedWriter.write(iteration+", "+perplexity/numOfPseudoPhrases+"\n");
perplexityBufferedWriter.flush();
}// end of if (iteration % displayTopicsInterval == 0) {
} catch (IOException e) {
System.out.println("Unable to calculate the perplexity of leaf levle "
+"or unable to write the current perplexity");
}// end of try catch{}
}else{
// Parent levels: stash this level's contribution for the final totals.
try {
perplexityOfParentLevles [iteration][currentLevel] =
printPerplexityState(currentLevel);
} catch (IOException e) {
System.out.println("Unable to calculate the perplexity of parent Levels ");
}// end of try catch{}
}
}// end of for (int iteration = 1; iteration <= numIterations; iteration++) {
}//while (currentLevel < numLevels){
// Final state: node dump to the state file plus tree print to stdout.
printNodes();
System.out.println("rootNode nodeID: "+rootNode.nodeID
+" rootNode nodeLevel: "+rootNode.level
+" rootNode children size: "+rootNode.children.size());
printTree(rootNode);
}// end of estimate(int numIterations)
/**
 * Reassigns tables for every current leaf of one document and records the
 * document's new leaf set in documentLeaves.
 */
public void assignTableForOneDoc (int doc){
    HashSet<NCRPNode> previousLeaves = documentLeaves.get(doc);
    HashSet<NCRPNode> updatedLeaves = new HashSet<NCRPNode>();
    for (NCRPNode leaf : previousLeaves) {
        assignTable(leaf, updatedLeaves, doc);
    }
    documentLeaves.put(doc, updatedLeaves);
}
/**
 * Pushes every phrase of {@code doc} that is NOT the leaf's dominant word type
 * one level deeper (a newly selected child path of the leaf), leaving only the
 * top word type behind. Newly used child nodes are added to currentDocLeaves.
 */
public void assignTable(NCRPNode leafnode, HashSet <NCRPNode> currentDocLeaves, int doc){
NCRPNode node = null;
int type;
int topWordType = leafnode.getTopWordType();
HashSet <Phrase> phraseHashSet = leafnode.hashSet;
HashSet <Phrase> removephraseHashSet = new HashSet <Phrase>();
for (Phrase phrase: phraseHashSet){
if (phrase.getDocID() == doc){
type = phrase.getType();
if (type != topWordType){
// Each relocated phrase counts again as a pseudo-phrase at the deeper level.
numOfPseudoPhrases++;
node = leafnode.selectPath(phrase);
node.totalTokens++;
node.typeCounts[type]++;
node.hashSet.add(phrase);
removephraseHashSet.add(phrase);
//update doc leaves
currentDocLeaves.add(node);
}
}
}// end of for (Phrase phrase: phraseHashSet){
// Deferred removal avoids mutating leafnode.hashSet while iterating over it.
for (Phrase phrase: removephraseHashSet){
type = phrase.getType();
leafnode.totalTokens--;
leafnode.typeCounts[type]--;
leafnode.hashSet.remove(phrase);
}
}// end of arrangeTable
/**
 * Repeatedly resamples table assignments for all documents until every
 * document's leaf-set size stops changing between sweeps, or numIterations
 * sweeps have run.
 */
public void sampleTableForAllDocs(int numIterations){
boolean quit = false;
boolean [] quitCurrentDoc = new boolean [numDocuments];
// The "|| quit" keeps the loop alive for one extra pass so the else branch
// below can report and break once every document has converged.
for (int iteration = 1; iteration <= numIterations || quit; iteration++) {
if (!quit){
for (int doc=0; doc < numDocuments; doc++) {
if (!quitCurrentDoc[doc]){
HashSet <NCRPNode> oddCurrentDocLeaves = documentLeaves.get(doc);
sampleTableForOneDoc(doc);
HashSet <NCRPNode> newCurrentDocLeaves = documentLeaves.get(doc);
// Convergence heuristic: the sweep left the leaf-set size unchanged.
if (oddCurrentDocLeaves.size() == newCurrentDocLeaves.size()) {
quitCurrentDoc[doc] = true;
}
}// end of if (!quitCurrentDoc[doc])
}// end of for (int doc=0; doc < numDocuments; doc++) {
// quit only when every document has converged.
quit = true;
for (boolean q : quitCurrentDoc){quit = quit & q;}
}else {
System.out.println("QUIT FROM numIterations: " +iteration);
break;
}// end of if (!quit){
}// end of for (int iteration = 1; iteration <= numIterations || quit; iteration++)
System.out.println("QUIT VALUE: " +quit);
}// end of sampleTableForAllDocs(int numIterations)
/** Runs assignTableForOneDoc over every document in corpus order. */
public void assignTableForAllDocs(){
    int docIndex = 0;
    while (docIndex < numDocuments) {
        assignTableForOneDoc(docIndex);
        docIndex++;
    }
}
/**
 * Resamples a table for every current leaf of one document and stores the
 * resulting leaf set back into documentLeaves.
 */
public void sampleTableForOneDoc (int doc){
    HashSet<NCRPNode> previousLeaves = documentLeaves.get(doc);
    HashSet<NCRPNode> updatedLeaves = new HashSet<NCRPNode>();
    for (NCRPNode leaf : previousLeaves) {
        sampleTable(leaf, updatedLeaves, doc);
    }
    documentLeaves.put(doc, updatedLeaves);
}
/**
 * Re-draws a table (a sibling path under the leaf's parent) for every phrase
 * of {@code doc} currently sitting on {@code leafnode}; empties left behind
 * are pruned from the tree via clean().
 */
public void sampleTable(NCRPNode leafnode, HashSet <NCRPNode> currentDocLeaves, int doc){
NCRPNode node = null;
int type;
HashSet <Phrase> phraseHashSet = leafnode.hashSet;
HashMap <Phrase, NCRPNode> phraseToNode = new HashMap <Phrase, NCRPNode>();
// Snapshot this document's phrases first so the removals below are safe.
for (Phrase phrase: phraseHashSet){
if (phrase.getDocID() == doc){
phraseToNode.put(phrase, leafnode);
}
}// end of for (Phrase phrase: phraseHashSet){
for (Phrase phrase: phraseToNode.keySet()){
node = phraseToNode.get(phrase);
type = phrase.getType();
// Detach from the current node, then re-route from the leaf's parent.
node.hashSet.remove(phrase);
node.typeCounts[type]--;
node.totalTokens--;
NCRPNode oldNode = node;
node = leafnode.parent.selectPath(phrase);
node.totalTokens++;
node.typeCounts[type]++;
node.hashSet.add(phrase);
//update doc leaves
currentDocLeaves.add(node);
// Garbage-collect the previous node once it holds no tokens at all.
if (oldNode.totalTokens == 0) oldNode.clean();
}// end of for (Phrase phrase: phraseHashSet){
}// end of arrangeTable
/**
 * One Gibbs sweep over every phrase of document {@code doc}: each phrase is
 * removed from its current leaf, a replacement leaf is drawn among that leaf's
 * siblings (weight grows with the leaf's document count and its smoothed
 * probability of the phrase's word type), and counts are restored on the
 * winner. Finally updates documentLeaves[doc] with the leaves now in use.
 */
public void sampleTopicForOneDoc(int doc) {
    HashSet<NCRPNode> oddCurrentDocLeaves = documentLeaves.get(doc);
    HashSet<NCRPNode> currentDocLeaves = new HashSet<NCRPNode>();
    NCRPNode node;
    int type;
    double sum;
    HashMap<NCRPNode, Integer> leafCounts = new HashMap<NCRPNode, Integer>();
    HashMap<NCRPNode, Double> leafWeights = new HashMap<NCRPNode, Double>();
    HashMap<Phrase, NCRPNode> phraseToNode = new HashMap<Phrase, NCRPNode>();
    // Pass 1: count this document's phrases per leaf and remember each phrase's leaf.
    for (NCRPNode leafnode : oddCurrentDocLeaves) {
        HashSet<Phrase> phraseHashSet = leafnode.hashSet;
        for (Phrase phrase : phraseHashSet) {
            if (phrase.getDocID() == doc) {
                phraseToNode.put(phrase, leafnode);
                if (!leafCounts.containsKey(leafnode)) {
                    leafCounts.put(leafnode, 1);
                } else {
                    leafCounts.put(leafnode, leafCounts.get(leafnode) + 1);
                }
            }
        }
    }
    // NOTE(review): sum and leafWeights persist across phrases in the original
    // code (never reset per phrase); preserved as-is — confirm this is intended.
    sum = 0.0;
    // Pass 2: resample a leaf for every phrase.
    for (Phrase phrase : phraseToNode.keySet()) {
        node = phraseToNode.get(phrase);
        type = phrase.getType();
        // Remove the phrase's contribution from its current leaf.
        leafCounts.put(node, leafCounts.get(node) - 1);
        node.hashSet.remove(phrase);
        node.typeCounts[type]--;
        node.totalTokens--;
        NCRPNode oldNode = node;
        // Weigh each sibling leaf under the same parent.
        for (NCRPNode leafnode : node.parent.children) {
            if (leafCounts.get(leafnode) != null && leafnode != null) {
                double weightValue = (alpha + leafCounts.get(leafnode) *
                        (eta + leafnode.typeCounts[type]) /
                        (etaSum + leafnode.totalTokens));
                leafWeights.put(leafnode, weightValue);
                sum += leafWeights.get(leafnode);
            }
        }
        node = (NCRPNode) random.nextDiscrete(leafWeights, sum);
        if (node != null) {
            node.hashSet.add(phrase);
            node.typeCounts[type]++;
            node.totalTokens++;
            if (!leafCounts.containsKey(node)) {
                leafCounts.put(node, 1);
            } else {
                leafCounts.put(node, leafCounts.get(node) + 1);
            }
            if (!currentDocLeaves.contains(node)) {
                currentDocLeaves.add(node);
            }
        } else {
            // Draw produced no node: put the phrase back on its previous leaf.
            oldNode.hashSet.add(phrase);
            oldNode.typeCounts[type]++;
            oldNode.totalTokens++;
            if (!leafCounts.containsKey(oldNode)) {
                leafCounts.put(oldNode, 1);
            } else {
                // BUG FIX: the original read leafCounts.get(node) here, but node is
                // null in this branch, so unboxing threw a NullPointerException.
                leafCounts.put(oldNode, leafCounts.get(oldNode) + 1);
            }
            if (!currentDocLeaves.contains(oldNode)) {
                currentDocLeaves.add(oldNode);
            }
        }
        // Prune the old leaf from the tree if it just became empty.
        if (oldNode.totalTokens == 0) {
            oldNode.clean();
        }
    }
    documentLeaves.put(doc, currentDocLeaves);
}
/**
 * Computes the summed negative log-likelihood of every phrase on the given
 * level's leaves, combining per-document topic weights with per-topic word
 * weights. Despite its name and the original comment, nothing is written to a
 * file here; callers divide the result by the phrase count and log it.
 */
public double printPerplexityState(int level) throws IOException {
int doc = 0;
double sumPerplexity =0.0;
for (Instance instance: instances) {
FeatureSequence fs = (FeatureSequence) instance.getData();
int seqLen = fs.getLength();
HashSet <NCRPNode> oddCurrentDocLeaves = documentLeaves.get(doc);
int type;
// Author's open question: whether to normalize by seqLen or by the number of
// phrases actually found on the leaves (sumCountsInLeaves, used below).
double sumCountsInLeaves = 0.0;
HashMap <NCRPNode, Integer> wordCountsInCurrentLeaf = new HashMap <NCRPNode, Integer>();
// Count this document's phrases per leaf.
for (NCRPNode leafnode : oddCurrentDocLeaves){
HashSet <Phrase> phraseHashSet = leafnode.hashSet;
for (Phrase phrase: phraseHashSet){
if (phrase.getDocID() == doc){
sumCountsInLeaves++;
if (! wordCountsInCurrentLeaf.containsKey(leafnode)) {
wordCountsInCurrentLeaf.put(leafnode, 1);
}
else {
wordCountsInCurrentLeaf.put(leafnode, wordCountsInCurrentLeaf.get(leafnode) + 1);
}
}
}//end of for (Phrase phrase: phraseArrayList){
}//end of for (NCRPNode leafnode : oddCurrentDocLeaves){
// Accumulate -log(p(topic|doc) * p(word|topic)) for each phrase.
for (NCRPNode leafnode : oddCurrentDocLeaves){
if (wordCountsInCurrentLeaf.get(leafnode) != null){
double topicWeightInCurrentDoc = (double)wordCountsInCurrentLeaf
.get(leafnode)/sumCountsInLeaves;
HashSet <Phrase> phraseHashSet = leafnode.hashSet;
for (Phrase phrase: phraseHashSet){
if (phrase.getDocID() == doc){
type = phrase.getType();
// Both branches currently compute the same quantity; the split remains
// from an earlier variant that restricted parent levels to top words.
if (level < numLevels-1){
double typeWeightInTopic = (double) leafnode.typeCounts[type]/ leafnode.totalTokens;
sumPerplexity += - Math.log(topicWeightInCurrentDoc * typeWeightInTopic);
}else{
double typeWeightInTopic = (double) leafnode.typeCounts[type]/ leafnode.totalTokens;
sumPerplexity += - Math.log(topicWeightInCurrentDoc * typeWeightInTopic);
}
}
}//end of for (Phrase phrase: phraseArrayList){
}
}//end of for (NCRPNode leafnode : oddCurrentDocLeaves){
doc++;
}
return sumPerplexity;
}
/**
 * Writes the current sampling state (one line per tree node, depth-indented)
 * to the file behind <code>stateBufferedWriter</code>, starting at the root.
 */
public void printNodes() {
printNode(rootNode, 0);
}
/**
 * Writes one depth-indented line for {@code node} (token total plus its words,
 * most frequent first) to the state file, then recurses into its children.
 * Write failures are reported to stderr and do not stop the traversal.
 */
public void printNode(NCRPNode node, int indent) {
    // StringBuilder replaces StringBuffer: the buffer is method-local, so the
    // synchronized StringBuffer bought nothing.
    StringBuilder out = new StringBuilder();
    for (int i = 0; i < indent; i++) {
        out.append(" ");
    }
    out.append("totalTokens: " + node.totalTokens + "/" + " ");
    out.append(node.getAllTopWords());
    try {
        stateBufferedWriter.write(out.toString() + "\n");
        stateBufferedWriter.flush();
    } catch (IOException e) {
        // Message typo fixed: "disctribution" -> "distribution".
        System.err.println("unable to write current topic distribution state");
    }
    for (NCRPNode child : node.children) {
        printNode(child, indent + 1);
    }
}
/**
 * Recursively prints every descendant of {@code parentNode} to stdout:
 * IDs, level, table, parent, child count, token total, and top words.
 */
public void printTree (NCRPNode parentNode){
    for (NCRPNode child : parentNode.children) {
        int tableId = child.getTableID();
        String line = "nodeID: " + child.nodeID
                + " nodeLevel: " + child.level + " tableIDAtSomeLevel: " + tableId
                + " parent nodeID: " + child.parent.nodeID
                + " children size: " + child.children.size()
                + " nodeTotalTokens: " + child.totalTokens
                + " topWords " + child.getAllTopWords();
        System.out.println(line);
        printTree(child);
    }
}
/**
 * Command-line test entry point. Expects 7 args: training instances file,
 * testing instances file, database name, db user, db password, table name,
 * temp relation table name. Runs a 5-level, single-thread model for 250
 * iterations. (See the MALLET HierarchicalLDA TUI for a fuller interface.)
 */
public static void main (String[] args) {
try {
InstanceList instances = InstanceList.load(new File(args[0]));
InstanceList testing = InstanceList.load(new File(args[1]));
HierarchicalRLDA sampler = new HierarchicalRLDA();
sampler.initialize(instances, testing, 5, 1, new Randoms(),args[2],args[3],args[4],args[5], args[6]);
sampler.estimate(250);
} catch (Exception e) {
e.printStackTrace();
}
}
/**
 * One node (table/topic) of the nested-CRP topic tree: its tree position plus
 * per-word-type counts and the set of phrases currently assigned to it.
 */
public class NCRPNode {
//int customers;
ArrayList<NCRPNode> children;
NCRPNode parent; // null only for the root
int level; // depth in the tree; the root is level 0
int tableID; // index within parent.children at creation time
int totalTokens; // number of phrases currently assigned here
int[] typeCounts; // per-word-type assignment counts (length = numTypes)
public HashSet <Phrase> hashSet; // the assigned phrases themselves
public int nodeID; // globally unique; taken from the enclosing sampler's totalNodes
public NCRPNode(NCRPNode parent, int dimensions, int level) {
//this.customers = 0;
this.parent = parent;
this.children = new ArrayList<NCRPNode>();
this.level = level;
this.totalTokens = 0;
this.typeCounts = new int[dimensions];
this.hashSet = new HashSet<Phrase>();
this.nodeID = totalNodes;
totalNodes++;
}
// Direct access to the node's phrase set (no defensive copy).
public HashSet<Phrase> getHashSet() {
return hashSet;
}
public void setHashSet(HashSet<Phrase> hashSet) {
this.hashSet = hashSet;
}
public int getTableID() {
return this.tableID;
}
public void setTableID(int tableID) {
this.tableID = tableID;
}
// Root-node constructor: no parent, level 0.
public NCRPNode(int dimensions) {
this(null, dimensions, 0);
}
/**
 * Creates a new child one level below this node, registers it, and assigns it
 * the next table index. Returns the new child.
 */
public NCRPNode addChild() {
    NCRPNode child = new NCRPNode(this, typeCounts.length, level + 1);
    children.add(child);
    child.setTableID(children.size() - 1);
    return child;
}
// A node is a leaf when it sits on the deepest configured level.
public boolean isLeaf() {
return this.level == numLevels - 1;
}
/**
 * Grows a fresh chain of children from this node down to the deepest level
 * and returns the new leaf (returns this node if it is already a leaf).
 */
public NCRPNode getNewLeaf() {
    NCRPNode current = this;
    int depth = level;
    while (depth < numLevels - 1) {
        current = current.addChild();
        depth++;
    }
    return current;
}
/**
 * Detaches this now-empty node from its parent's child list; called when
 * totalTokens drops to zero so the tree does not accumulate dead leaves.
 * Note: only unlinks from the parent — any children are abandoned with it.
 * (Removed a useless {@code node = null} self-assignment, an unused local,
 * and commented-out debug prints from the original.)
 */
public void clean() {
    parent.remove(this);
}
/**
 * Drops the given child from this node's child list. NCRPNode does not
 * override equals, so List.remove matches by reference identity here.
 * (Commented-out debug prints removed from the original.)
 */
public void remove(NCRPNode child) {
    this.children.remove(child);
}
/**
 * Picks an existing child roughly in proportion to its token count (CRP-style,
 * with the gamma mass excluded). Flagged "need fixing" by the original author:
 * the weights do not sum to 1, so this relies on random.nextDiscrete handling
 * unnormalized weights — TODO confirm.
 */
public NCRPNode selectExisting() {
double[] weights = new double[children.size()];
int i = 0;
for (NCRPNode child: children) {
weights[i] = (double) child.totalTokens / (gamma + this.totalTokens);
i++;
}
int choice = random.nextDiscrete(weights);
return children.get(choice);
}
/**
 * Scores candidate child {@code node} for {@code phrase}: 1-gamma as soon as
 * the node already holds the same word type; otherwise, for phrases from the
 * same document, a CRP weight decayed by sentence distance (and additionally
 * by chunk distance when the chunks differ), keeping the best such score;
 * gamma when only other documents' phrases are present.
 */
public double getMap(NCRPNode node, Phrase phrase){
double probably = 0.0;
HashSet <Phrase> phraseList = node.getHashSet();
for (Phrase phraseInst : phraseList){
double tempProbably = 0.0;
if (phraseInst.getType() == phrase.getType()){
// Exact word-type match dominates every distance-based score.
probably = 1.0 - gamma;
break;
}else{
if (phraseInst.getDocID() == phrase.getDocID()){
if (phraseInst.getChunkID() == phrase.getChunkID()){// in the same chunk
tempProbably =(double) (node.totalTokens - (1 - 1 / Math.abs(1+ gamma + phraseInst.getSentenceID() - phrase.getSentenceID()))) /
(gamma + this.totalTokens);
}else{
// Different chunk: denominator also scales with chunk distance.
tempProbably =(double)(node.totalTokens - (1 - 1 / Math.abs(1+ gamma + phraseInst.getSentenceID() - phrase.getSentenceID()))) /
(gamma + (1 + Math.abs(phraseInst.getChunkID() - phrase.getChunkID())) * this.totalTokens);
}
probably = (tempProbably >probably) ? tempProbably: probably;
}else
probably = gamma;// other document: treat distance as maximal
}
}
return probably;
}
/**
 * Nested-CRP path choice for one phrase: slot 0 carries the gamma weight for
 * opening a brand-new child; each existing child is weighted by getMap.
 * Returns the chosen (possibly freshly created) child.
 */
public NCRPNode selectPath(Phrase phrase) {
double[] weights = new double[this.children.size() + 1];
weights[0] = gamma / (gamma + this.totalTokens);
int i = 1;
double sumWeight = weights[0];
for (NCRPNode child: children) {
weights[i] = getMap(child, phrase);
// has been changed to find the closest word
sumWeight += weights[i];
i++;
}
int choice = random.nextDiscrete(weights, sumWeight);
if (choice == 0) {
return(addChild());
}
else {
return this.children.get(choice - 1);
}
}
/**
 * Returns every word observed at this node, most frequent first, as
 * {word, typeIndex} string pairs; null when the node (or model) has no words.
 */
public ArrayList<String[]> getAllTopHashWords() {
    if (numTypes <= 0) {
        System.out.println("No enough topic words!");
        return null;
    }
    IDSorter[] ranked = new IDSorter[numTypes];
    int observed = 0;
    for (int t = 0; t < numTypes; t++) {
        ranked[t] = new IDSorter(t, typeCounts[t]);
        if (typeCounts[t] > 0) {
            observed++;
        }
    }
    if (observed <= 0) {
        return null;
    }
    Arrays.sort(ranked);
    Alphabet alphabet = instances.getDataAlphabet();
    ArrayList<String[]> result = new ArrayList<String[]>();
    for (int rank = 0; rank < observed; rank++) {
        int typeId = ranked[rank].getID();
        result.add(new String[] { alphabet.lookupObject(typeId) + "", typeId + "" });
    }
    return result;
}
/**
 * Returns the most frequent word at this node that is NOT already in
 * {@code topiclist}, as a {word, typeIndex} pair; null when every candidate
 * (scan capped at 100 and at numTypes) is already taken or no words exist.
 */
public String[] getTopHashWords(HashSet<String> topiclist) {
if(numTypes > 0){
String [] topcWithType= new String [2];
IDSorter[] sortedTypes = new IDSorter[numTypes];
int notNullTypeCounts = 0;
for (int type=0; type < numTypes; type++) {
sortedTypes[type] = new IDSorter(type, this.typeCounts[type]);
if (this.typeCounts[type]>0){
notNullTypeCounts++;
}
}
if (notNullTypeCounts >0){
Arrays.sort(sortedTypes);
Alphabet alphabet = instances.getDataAlphabet();
String topic="";
int i=0;
// Walk down the ranking until a word outside topiclist is found.
do {
topic = alphabet.lookupObject(sortedTypes[i].getID())+"";
i++;
}while(topiclist.contains(topic) && (i < numTypes) && (i<100));
if (topiclist.contains(topic)){
return null;
}else{
// i was already advanced past the accepted word, hence i-1.
topcWithType[0] = topic;
topcWithType[1] = sortedTypes[i-1].getID()+"";
return topcWithType;
}
}else
return null;
}else{
System.out.println("No enough topic words on current level!");
return null;
}
}
/**
 * Index of this node's most frequent word type; 0 when the node has no
 * observed words or the model has an empty vocabulary.
 */
public int getTopWordType (){
    if (numTypes <= 0) {
        System.out.println("No enough topic words!");
        return 0;
    }
    IDSorter[] ranked = new IDSorter[numTypes];
    int observed = 0;
    for (int t = 0; t < numTypes; t++) {
        ranked[t] = new IDSorter(t, typeCounts[t]);
        if (typeCounts[t] > 0) {
            observed++;
        }
    }
    if (observed <= 0) {
        return 0;
    }
    Arrays.sort(ranked);
    return ranked[0].getID();
}
/**
 * The single most frequent word at this node as a {word+" ", typeIndex} pair
 * (the trailing space matches the original StringBuffer output); null when
 * nothing has been observed here.
 */
public String[] getTopWords() {
    if (numTypes <= 0) {
        System.out.println("No enough topic words!");
        return null;
    }
    IDSorter[] ranked = new IDSorter[numTypes];
    int observed = 0;
    for (int t = 0; t < numTypes; t++) {
        ranked[t] = new IDSorter(t, typeCounts[t]);
        if (typeCounts[t] > 0) {
            observed++;
        }
    }
    if (observed <= 0) {
        return null;
    }
    Arrays.sort(ranked);
    Alphabet alphabet = instances.getDataAlphabet();
    String[] wordWithType = new String[2];
    wordWithType[0] = alphabet.lookupObject(ranked[0].getID()) + " ";
    wordWithType[1] = ranked[0].getID() + "";
    return wordWithType;
}
/**
 * Space-separated list of up to {@code numWords} most frequent words at this
 * node (fewer when fewer have been observed); null when nothing was observed.
 */
public String getTopWords(int numWords) {
if(numTypes > 0){
IDSorter[] sortedTypes = new IDSorter[numTypes];
int notNullTypeCounts = 0;
for (int type=0; type < numTypes; type++) {
sortedTypes[type] = new IDSorter(type, this.typeCounts[type]);
if (this.typeCounts[type]>0){
notNullTypeCounts++;
}
}
if (notNullTypeCounts >0){
Arrays.sort(sortedTypes);
Alphabet alphabet = instances.getDataAlphabet();
StringBuffer out = new StringBuffer();
// Never emit more words than were actually observed.
int iteration = (numWords < notNullTypeCounts)? numWords : notNullTypeCounts;
for (int i=0; i<iteration; i++) {
out.append(alphabet.lookupObject(sortedTypes[i].getID()) + " ");
}
return out.toString();
}else
return null;
}else{
System.out.println("No enough topic words!");
return null;
}
}
/**
 * Space-separated list of every word observed at this node, most frequent
 * first; null when nothing has been observed.
 */
public String getAllTopWords() {
if(numTypes > 0){
StringBuffer out = new StringBuffer();
IDSorter[] sortedTypes = new IDSorter[numTypes];
int notNullTypeCounts = 0;
for (int type=0; type < numTypes; type++) {
sortedTypes[type] = new IDSorter(type, this.typeCounts[type]);
if (this.typeCounts[type]>0){
notNullTypeCounts++;
}
}
if (notNullTypeCounts >0){
Arrays.sort(sortedTypes);
Alphabet alphabet = instances.getDataAlphabet();
int iteration = notNullTypeCounts;
for (int i=0; i<iteration; i++) {
out.append(alphabet.lookupObject(sortedTypes[i].getID()) + " ");
}
return out.toString();
}else
return null;
}else{
System.out.println("No enough topic words!");
return null;
}
}
/**
 * Names of the documents containing word type {@code type}, ordered by how
 * often the word occurs in each document (via the tdm count matrix).
 */
public ArrayList<String> getDocs(int type) {
ArrayList<String> arraylist= new ArrayList<String>();
IDSorter[] sortedTypes = new IDSorter[numDocuments];
for (int doc=0; doc < numDocuments; doc++) {
sortedTypes[doc] = new IDSorter(doc, tdm[type][doc]);
}
Arrays.sort(sortedTypes);
// Keep only documents where the word actually occurs.
for (IDSorter sTypes : sortedTypes){
if (tdm[type][sTypes.getID()]>0){
arraylist.add(instances.get(sTypes.getID()).getName().toString());
}
}
return arraylist;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.compress.zlib;
import static org.junit.Assert.*;
import static org.junit.Assume.*;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Random;
import java.util.zip.DeflaterOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.compress.CompressDecompressTester;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.DecompressorStream;
import org.apache.hadoop.io.compress.CompressDecompressTester.CompressionTestStrategy;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.io.compress.zlib.ZlibDecompressor.ZlibDirectDecompressor;
import org.apache.hadoop.test.MultithreadedTestUtil;
import org.apache.hadoop.util.NativeCodeLoader;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.ImmutableSet;
public class TestZlibCompressorDecompressor {
// Fixed seed keeps the generated test data reproducible across runs.
private static final Random random = new Random(12345L);
@Before
public void before() {
// Skip the whole fixture when the native zlib library is not loaded.
assumeTrue(ZlibFactory.isNativeZlibLoaded(new Configuration()));
}
// Round-trips 44 KB of random data through the default ZlibCompressor/
// ZlibDecompressor pair across the shared single-block/block/error/empty-stream
// test strategies.
@Test
public void testZlibCompressorDecompressor() {
try {
int SIZE = 44 * 1024;
byte[] rawData = generate(SIZE);
CompressDecompressTester.of(rawData)
.withCompressDecompressPair(new ZlibCompressor(), new ZlibDecompressor())
.withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
.test();
} catch (Exception ex) {
fail("testCompressorDecompressor error !!!" + ex);
}
}
@Test
public void testCompressorDecompressorWithExeedBufferLimit() {
int BYTE_SIZE = 100 * 1024;
byte[] rawData = generate(BYTE_SIZE);
try {
CompressDecompressTester.of(rawData)
.withCompressDecompressPair(
new ZlibCompressor(
org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel.BEST_COMPRESSION,
CompressionStrategy.DEFAULT_STRATEGY,
org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionHeader.DEFAULT_HEADER,
BYTE_SIZE),
new ZlibDecompressor(
org.apache.hadoop.io.compress.zlib.ZlibDecompressor.CompressionHeader.DEFAULT_HEADER,
BYTE_SIZE))
.withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
.test();
} catch (Exception ex) {
fail("testCompressorDecompressorWithExeedBufferLimit error !!!" + ex);
}
}
@Test
public void testZlibCompressorDecompressorWithConfiguration() {
Configuration conf = new Configuration();
if (ZlibFactory.isNativeZlibLoaded(conf)) {
byte[] rawData;
int tryNumber = 5;
int BYTE_SIZE = 10 * 1024;
Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
rawData = generate(BYTE_SIZE);
try {
for (int i = 0; i < tryNumber; i++)
compressDecompressZlib(rawData, (ZlibCompressor) zlibCompressor,
(ZlibDecompressor) zlibDecompressor);
zlibCompressor.reinit(conf);
} catch (Exception ex) {
fail("testZlibCompressorDecompressorWithConfiguration ex error " + ex);
}
} else {
assertTrue("ZlibFactory is using native libs against request",
ZlibFactory.isNativeZlibLoaded(conf));
}
}
@Test
public void testZlibCompressDecompress() {
byte[] rawData = null;
int rawDataSize = 0;
rawDataSize = 1024 * 64;
rawData = generate(rawDataSize);
try {
ZlibCompressor compressor = new ZlibCompressor();
ZlibDecompressor decompressor = new ZlibDecompressor();
assertFalse("testZlibCompressDecompress finished error",
compressor.finished());
compressor.setInput(rawData, 0, rawData.length);
assertTrue("testZlibCompressDecompress getBytesRead before error",
compressor.getBytesRead() == 0);
compressor.finish();
byte[] compressedResult = new byte[rawDataSize];
int cSize = compressor.compress(compressedResult, 0, rawDataSize);
assertTrue("testZlibCompressDecompress getBytesRead ather error",
compressor.getBytesRead() == rawDataSize);
assertTrue(
"testZlibCompressDecompress compressed size no less then original size",
cSize < rawDataSize);
decompressor.setInput(compressedResult, 0, cSize);
byte[] decompressedBytes = new byte[rawDataSize];
decompressor.decompress(decompressedBytes, 0, decompressedBytes.length);
assertArrayEquals("testZlibCompressDecompress arrays not equals ",
rawData, decompressedBytes);
compressor.reset();
decompressor.reset();
} catch (IOException ex) {
fail("testZlibCompressDecompress ex !!!" + ex);
}
}
private void compressDecompressLoop(int rawDataSize) throws IOException {
byte[] rawData = null;
rawData = generate(rawDataSize);
ByteArrayOutputStream baos = new ByteArrayOutputStream(rawDataSize+12);
DeflaterOutputStream dos = new DeflaterOutputStream(baos);
dos.write(rawData);
dos.flush();
dos.close();
byte[] compressedResult = baos.toByteArray();
int compressedSize = compressedResult.length;
ZlibDirectDecompressor decompressor = new ZlibDirectDecompressor();
ByteBuffer inBuf = ByteBuffer.allocateDirect(compressedSize);
ByteBuffer outBuf = ByteBuffer.allocateDirect(rawDataSize);
inBuf.put(compressedResult, 0, compressedSize);
inBuf.flip();
ByteBuffer expected = ByteBuffer.wrap(rawData);
outBuf.clear();
while(!decompressor.finished()) {
decompressor.decompress(inBuf, outBuf);
if (outBuf.remaining() == 0) {
outBuf.flip();
while (outBuf.remaining() > 0) {
assertEquals(expected.get(), outBuf.get());
}
outBuf.clear();
}
}
outBuf.flip();
while (outBuf.remaining() > 0) {
assertEquals(expected.get(), outBuf.get());
}
outBuf.clear();
assertEquals(0, expected.remaining());
}
@Test
public void testZlibDirectCompressDecompress() {
int[] size = { 1, 4, 16, 4 * 1024, 64 * 1024, 128 * 1024, 1024 * 1024 };
assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
try {
for (int i = 0; i < size.length; i++) {
compressDecompressLoop(size[i]);
}
} catch (IOException ex) {
fail("testZlibDirectCompressDecompress ex !!!" + ex);
}
}
@Test
public void testZlibCompressorDecompressorSetDictionary() {
Configuration conf = new Configuration();
if (ZlibFactory.isNativeZlibLoaded(conf)) {
Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
checkSetDictionaryNullPointerException(zlibCompressor);
checkSetDictionaryNullPointerException(zlibDecompressor);
checkSetDictionaryArrayIndexOutOfBoundsException(zlibDecompressor);
checkSetDictionaryArrayIndexOutOfBoundsException(zlibCompressor);
} else {
assertTrue("ZlibFactory is using native libs against request",
ZlibFactory.isNativeZlibLoaded(conf));
}
}
@Test
public void testZlibFactory() {
Configuration cfg = new Configuration();
assertTrue("testZlibFactory compression level error !!!",
CompressionLevel.DEFAULT_COMPRESSION == ZlibFactory
.getCompressionLevel(cfg));
assertTrue("testZlibFactory compression strategy error !!!",
CompressionStrategy.DEFAULT_STRATEGY == ZlibFactory
.getCompressionStrategy(cfg));
ZlibFactory.setCompressionLevel(cfg, CompressionLevel.BEST_COMPRESSION);
assertTrue("testZlibFactory compression strategy error !!!",
CompressionLevel.BEST_COMPRESSION == ZlibFactory
.getCompressionLevel(cfg));
ZlibFactory.setCompressionStrategy(cfg, CompressionStrategy.FILTERED);
assertTrue("testZlibFactory compression strategy error !!!",
CompressionStrategy.FILTERED == ZlibFactory.getCompressionStrategy(cfg));
}
private boolean checkSetDictionaryNullPointerException(
Decompressor decompressor) {
try {
decompressor.setDictionary(null, 0, 1);
} catch (NullPointerException ex) {
return true;
} catch (Exception ex) {
}
return false;
}
private boolean checkSetDictionaryNullPointerException(Compressor compressor) {
try {
compressor.setDictionary(null, 0, 1);
} catch (NullPointerException ex) {
return true;
} catch (Exception ex) {
}
return false;
}
private boolean checkSetDictionaryArrayIndexOutOfBoundsException(
Compressor compressor) {
try {
compressor.setDictionary(new byte[] { (byte) 0 }, 0, -1);
} catch (ArrayIndexOutOfBoundsException e) {
return true;
} catch (Exception e) {
}
return false;
}
private boolean checkSetDictionaryArrayIndexOutOfBoundsException(
Decompressor decompressor) {
try {
decompressor.setDictionary(new byte[] { (byte) 0 }, 0, -1);
} catch (ArrayIndexOutOfBoundsException e) {
return true;
} catch (Exception e) {
}
return false;
}
private byte[] compressDecompressZlib(byte[] rawData,
ZlibCompressor zlibCompressor, ZlibDecompressor zlibDecompressor)
throws IOException {
int cSize = 0;
byte[] compressedByte = new byte[rawData.length];
byte[] decompressedRawData = new byte[rawData.length];
zlibCompressor.setInput(rawData, 0, rawData.length);
zlibCompressor.finish();
while (!zlibCompressor.finished()) {
cSize = zlibCompressor.compress(compressedByte, 0, compressedByte.length);
}
zlibCompressor.reset();
assertTrue(zlibDecompressor.getBytesWritten() == 0);
assertTrue(zlibDecompressor.getBytesRead() == 0);
assertTrue(zlibDecompressor.needsInput());
zlibDecompressor.setInput(compressedByte, 0, cSize);
assertFalse(zlibDecompressor.needsInput());
while (!zlibDecompressor.finished()) {
zlibDecompressor.decompress(decompressedRawData, 0,
decompressedRawData.length);
}
assertTrue(zlibDecompressor.getBytesWritten() == rawData.length);
assertTrue(zlibDecompressor.getBytesRead() == cSize);
zlibDecompressor.reset();
assertTrue(zlibDecompressor.getRemaining() == 0);
assertArrayEquals(
"testZlibCompressorDecompressorWithConfiguration array equals error",
rawData, decompressedRawData);
return decompressedRawData;
}
@Test
public void testBuiltInGzipDecompressorExceptions() {
BuiltInGzipDecompressor decompresser = new BuiltInGzipDecompressor();
try {
decompresser.setInput(null, 0, 1);
} catch (NullPointerException ex) {
// expected
} catch (Exception ex) {
fail("testBuiltInGzipDecompressorExceptions npe error " + ex);
}
try {
decompresser.setInput(new byte[] { 0 }, 0, -1);
} catch (ArrayIndexOutOfBoundsException ex) {
// expected
} catch (Exception ex) {
fail("testBuiltInGzipDecompressorExceptions aioob error" + ex);
}
assertTrue("decompresser.getBytesRead error",
decompresser.getBytesRead() == 0);
assertTrue("decompresser.getRemaining error",
decompresser.getRemaining() == 0);
decompresser.reset();
decompresser.end();
InputStream decompStream = null;
try {
// invalid 0 and 1 bytes , must be 31, -117
int buffSize = 1 * 1024;
byte buffer[] = new byte[buffSize];
Decompressor decompressor = new BuiltInGzipDecompressor();
DataInputBuffer gzbuf = new DataInputBuffer();
decompStream = new DecompressorStream(gzbuf, decompressor);
gzbuf.reset(new byte[] { 0, 0, 1, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
decompStream.read(buffer);
} catch (IOException ioex) {
// expected
} catch (Exception ex) {
fail("invalid 0 and 1 byte in gzip stream" + ex);
}
// invalid 2 byte, must be 8
try {
int buffSize = 1 * 1024;
byte buffer[] = new byte[buffSize];
Decompressor decompressor = new BuiltInGzipDecompressor();
DataInputBuffer gzbuf = new DataInputBuffer();
decompStream = new DecompressorStream(gzbuf, decompressor);
gzbuf.reset(new byte[] { 31, -117, 7, 1, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
decompStream.read(buffer);
} catch (IOException ioex) {
// expected
} catch (Exception ex) {
fail("invalid 2 byte in gzip stream" + ex);
}
try {
int buffSize = 1 * 1024;
byte buffer[] = new byte[buffSize];
Decompressor decompressor = new BuiltInGzipDecompressor();
DataInputBuffer gzbuf = new DataInputBuffer();
decompStream = new DecompressorStream(gzbuf, decompressor);
gzbuf.reset(new byte[] { 31, -117, 8, -32, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
decompStream.read(buffer);
} catch (IOException ioex) {
// expected
} catch (Exception ex) {
fail("invalid 3 byte in gzip stream" + ex);
}
try {
int buffSize = 1 * 1024;
byte buffer[] = new byte[buffSize];
Decompressor decompressor = new BuiltInGzipDecompressor();
DataInputBuffer gzbuf = new DataInputBuffer();
decompStream = new DecompressorStream(gzbuf, decompressor);
gzbuf.reset(new byte[] { 31, -117, 8, 4, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
decompStream.read(buffer);
} catch (IOException ioex) {
// expected
} catch (Exception ex) {
fail("invalid 3 byte make hasExtraField" + ex);
}
}
public static byte[] generate(int size) {
byte[] data = new byte[size];
for (int i = 0; i < size; i++)
data[i] = (byte)random.nextInt(16);
return data;
}
@Test
public void testZlibCompressDecompressInMultiThreads() throws Exception {
MultithreadedTestUtil.TestContext ctx = new MultithreadedTestUtil.TestContext();
for(int i=0;i<10;i++) {
ctx.addThread( new MultithreadedTestUtil.TestingThread(ctx) {
@Override
public void doWork() throws Exception {
testZlibCompressDecompress();
}
});
}
ctx.startThreads();
ctx.waitFor(60000);
}
}
| |
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.cache;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.spi.CachingProvider;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.condition.AnyNestedCondition;
import org.springframework.boot.autoconfigure.condition.ConditionMessage;
import org.springframework.boot.autoconfigure.condition.ConditionOutcome;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnSingleCandidate;
import org.springframework.boot.autoconfigure.condition.SpringBootCondition;
import org.springframework.cache.jcache.JCacheCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.AnnotationAwareOrderComparator;
import org.springframework.core.annotation.Order;
import org.springframework.core.io.Resource;
import org.springframework.core.type.AnnotatedTypeMetadata;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
/**
* Cache configuration for JSR-107 compliant providers.
*
* @author Stephane Nicoll
* @author Madhura Bhave
* @since 1.3.0
*/
@Configuration
@ConditionalOnClass({ Caching.class, JCacheCacheManager.class })
@ConditionalOnMissingBean(org.springframework.cache.CacheManager.class)
@Conditional({ CacheCondition.class,
		JCacheCacheConfiguration.JCacheAvailableCondition.class })
@Import(HazelcastJCacheCustomizationConfiguration.class)
class JCacheCacheConfiguration {

	private final CacheProperties cacheProperties;

	private final CacheManagerCustomizers customizers;

	private final javax.cache.configuration.Configuration<?, ?> defaultCacheConfiguration;

	private final List<JCacheManagerCustomizer> cacheManagerCustomizers;

	private final List<JCachePropertiesCustomizer> cachePropertiesCustomizers;

	JCacheCacheConfiguration(CacheProperties cacheProperties,
			CacheManagerCustomizers customizers,
			ObjectProvider<javax.cache.configuration.Configuration<?, ?>> defaultCacheConfiguration,
			ObjectProvider<List<JCacheManagerCustomizer>> cacheManagerCustomizers,
			ObjectProvider<List<JCachePropertiesCustomizer>> cachePropertiesCustomizers) {
		this.cacheProperties = cacheProperties;
		this.customizers = customizers;
		this.defaultCacheConfiguration = defaultCacheConfiguration.getIfAvailable();
		this.cacheManagerCustomizers = cacheManagerCustomizers.getIfAvailable();
		this.cachePropertiesCustomizers = cachePropertiesCustomizers.getIfAvailable();
	}

	@Bean
	public JCacheCacheManager cacheManager(CacheManager jCacheCacheManager) {
		// Wrap the JSR-107 manager in Spring's adapter, then apply customizers.
		JCacheCacheManager adapter = new JCacheCacheManager(jCacheCacheManager);
		return this.customizers.customize(adapter);
	}

	@Bean
	@ConditionalOnMissingBean
	public CacheManager jCacheCacheManager() throws IOException {
		// Build the underlying JSR-107 manager and pre-create any configured caches.
		CacheManager manager = createCacheManager();
		List<String> names = this.cacheProperties.getCacheNames();
		if (!CollectionUtils.isEmpty(names)) {
			for (String name : names) {
				manager.createCache(name, getDefaultCacheConfiguration());
			}
		}
		customize(manager);
		return manager;
	}

	private CacheManager createCacheManager() throws IOException {
		CachingProvider provider = getCachingProvider(
				this.cacheProperties.getJcache().getProvider());
		Properties managerProperties = createCacheManagerProperties();
		Resource location = this.cacheProperties
				.resolveConfigLocation(this.cacheProperties.getJcache().getConfig());
		// Without an explicit config resource, fall back to the provider defaults.
		if (location == null) {
			return provider.getCacheManager(null, null, managerProperties);
		}
		return provider.getCacheManager(location.getURI(),
				provider.getDefaultClassLoader(), managerProperties);
	}

	private CachingProvider getCachingProvider(String cachingProviderFqn) {
		// An explicit FQN selects a specific provider; otherwise use the default.
		return (StringUtils.hasText(cachingProviderFqn)
				? Caching.getCachingProvider(cachingProviderFqn)
				: Caching.getCachingProvider());
	}

	private Properties createCacheManagerProperties() {
		Properties managerProperties = new Properties();
		if (this.cachePropertiesCustomizers != null) {
			for (JCachePropertiesCustomizer propertiesCustomizer : this.cachePropertiesCustomizers) {
				propertiesCustomizer.customize(this.cacheProperties, managerProperties);
			}
		}
		return managerProperties;
	}

	private javax.cache.configuration.Configuration<?, ?> getDefaultCacheConfiguration() {
		// Fall back to an empty mutable configuration when none was provided.
		return (this.defaultCacheConfiguration != null) ? this.defaultCacheConfiguration
				: new MutableConfiguration<>();
	}

	private void customize(CacheManager cacheManager) {
		if (this.cacheManagerCustomizers == null) {
			return;
		}
		// Honour @Order / Ordered on the customizers before invoking them.
		AnnotationAwareOrderComparator.sort(this.cacheManagerCustomizers);
		for (JCacheManagerCustomizer managerCustomizer : this.cacheManagerCustomizers) {
			managerCustomizer.customize(cacheManager);
		}
	}

	/**
	 * Determine if JCache is available. This either kicks in if a provider is available
	 * as defined per {@link JCacheProviderAvailableCondition} or if a
	 * {@link CacheManager} has already been defined.
	 */
	@Order(Ordered.LOWEST_PRECEDENCE)
	static class JCacheAvailableCondition extends AnyNestedCondition {

		JCacheAvailableCondition() {
			super(ConfigurationPhase.REGISTER_BEAN);
		}

		@Conditional(JCacheProviderAvailableCondition.class)
		static class JCacheProvider {
		}

		@ConditionalOnSingleCandidate(CacheManager.class)
		static class CustomJCacheCacheManager {
		}

	}

	/**
	 * Determine if a JCache provider is available. This either kicks in if a default
	 * {@link CachingProvider} has been found or if the property referring to the provider
	 * to use has been set.
	 */
	@Order(Ordered.LOWEST_PRECEDENCE)
	static class JCacheProviderAvailableCondition extends SpringBootCondition {

		@Override
		public ConditionOutcome getMatchOutcome(ConditionContext context,
				AnnotatedTypeMetadata metadata) {
			ConditionMessage.Builder message = ConditionMessage.forCondition("JCache");
			String providerProperty = "spring.cache.jcache.provider";
			// An explicitly configured provider always matches.
			if (context.getEnvironment().containsProperty(providerProperty)) {
				return ConditionOutcome
						.match(message.because("JCache provider specified"));
			}
			Iterator<CachingProvider> providers = Caching.getCachingProviders()
					.iterator();
			if (!providers.hasNext()) {
				return ConditionOutcome
						.noMatch(message.didNotFind("JSR-107 provider").atAll());
			}
			providers.next();
			// Exactly one provider on the classpath is required for auto-config.
			return (providers.hasNext()
					? ConditionOutcome
							.noMatch(message.foundExactly("multiple JSR-107 providers"))
					: ConditionOutcome
							.match(message.foundExactly("single JSR-107 provider")));
		}

	}

}
| |
package gov.nasa.worldwind.examples.sunlight;
import gov.nasa.worldwind.View;
import gov.nasa.worldwind.layers.AbstractLayer;
import gov.nasa.worldwind.geom.*;
import gov.nasa.worldwind.render.DrawContext;
import gov.nasa.worldwind.util.*;
import javax.media.opengl.*;
import java.awt.*;
/**
* Renders an atmosphere around the globe and a sky dome at low altitude.
* Uses atmospheric scattering as color source.
* <p>
* Issue : Ellipsoidal globe doesnt match the spherical atmosphere everywhere.
* <p>
* TODO: Find a way to get a blue sky at ground level
* TODO: Increase dome geometry resolution and implement partial sphere
*
* @author Patrick Murris
* @version $Id: AtmosphereLayer.java 12584 2009-09-14 19:25:59Z dcollins $
*/
public class AtmosphereLayer extends AbstractLayer
{
    // Dome tessellation resolution: latitude rings (stacks) x longitude wedges (slices).
    protected final static int STACKS = 24;
    protected final static int SLICES = 64;
    protected int glListId = -1; // GL list id; -1 means the dome geometry is not built yet
    protected double thickness = 60e3; // Atmosphere thickness
    protected double lastRebuildHorizon = 0; // horizon distance (m) when the dome was last rebuilt
    protected AtmosphericScatteringComputer asc; // lazily created color source; nulled on thickness change
    protected Vec4 sunDirection; // dome is not built while this is null (see makeSkyDome)
    protected boolean update = true; // forces a dome rebuild on the next doRender

    /**
     * Renders an atmosphere around the globe
     */
    public AtmosphereLayer() {
    }

    /**
     * Get the atmosphere thickness in meter
     * @return the atmosphere thickness in meter
     */
    public double getAtmosphereThickness()
    {
        return this.thickness;
    }

    /**
     * Set the atmosphere thickness in meter
     * @param thickness the atmosphere thickness in meter; must not be negative
     * @throws IllegalArgumentException if {@code thickness} is negative
     */
    public void setAtmosphereThickness(double thickness)
    {
        if (thickness < 0)
        {
            String msg = Logging.getMessage("generic.ArgumentOutOfRange");
            Logging.logger().severe(msg);
            throw new IllegalArgumentException(msg);
        }
        this.thickness = thickness;
        this.asc = null; // invalidate atmospheric scattering computer
        this.update = true;
    }

    /** @return the current sun direction vector, or null if never set. */
    public Vec4 getSunDirection()
    {
        return this.sunDirection;
    }

    /**
     * Set the sun direction used as the scattering light source.
     * Marks the dome for rebuild on the next frame.
     */
    public void setSunDirection(Vec4 direction)
    {
        this.sunDirection = direction;
        this.update = true;
    }

    @Override
    public void doRender(DrawContext dc)
    {
        GL gl = dc.getGL();
        // Track what was pushed so the finally block restores exactly that.
        boolean attribsPushed = false;
        boolean modelviewPushed = false;
        boolean projectionPushed = false;
        try {
            View view = dc.getView();
            Position camPos = dc.getGlobe().computePositionFromPoint(view.getEyePoint());
            double worldRadius = dc.getGlobe().getRadiusAt(camPos);
            double distToCenterOfPlanet = view.getEyePoint().getLength3();
            double camAlt = camPos.getElevation();
            double tangentalDistance = view.computeHorizonDistance();
            // Dome radius
            double domeRadius = tangentalDistance;
            // horizon latitude degrees
            double horizonLat = (-Math.PI / 2 + Math.acos(tangentalDistance / distToCenterOfPlanet))
                * 180 / Math.PI;
            // zenith latitude degrees
            double zenithLat = 90;
            // Above the atmosphere: cap the dome at the atmosphere's tangent latitude.
            if (camAlt >= thickness) {
                double tangentalDistanceZenith = Math.sqrt(distToCenterOfPlanet * distToCenterOfPlanet
                    - (worldRadius + thickness) * (worldRadius + thickness));
                zenithLat = (-Math.PI / 2 + Math.acos(tangentalDistanceZenith / distToCenterOfPlanet)) * 180 / Math.PI;
            }
            // In the top 30% of the atmosphere: blend the cap linearly toward 90 deg.
            if (camAlt < thickness && camAlt > thickness * 0.7) {
                zenithLat = (thickness - camAlt) / (thickness - thickness * 0.7) * 90;
            }
            // Build or rebuild sky dome if horizon distance changed more then 100m
            if (this.update || this.glListId == -1 || Math.abs(this.lastRebuildHorizon - tangentalDistance) > 100)
            {
                if (this.glListId != -1)
                    gl.glDeleteLists(this.glListId, 1);
                this.makeSkyDome(dc, (float) (domeRadius), horizonLat, zenithLat, SLICES, STACKS);
                this.lastRebuildHorizon = tangentalDistance;
                this.update = false;
            }
            // GL set up
            // NOTE(review): this push/pop pair looks like a deliberate driver
            // workaround (see comment) - confirm before removing.
            gl.glPushAttrib(GL.GL_POLYGON_BIT); // Temporary hack around aliased sky.
            gl.glPopAttrib();
            gl.glPushAttrib(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT | GL.GL_TRANSFORM_BIT
                | GL.GL_POLYGON_BIT | GL.GL_TEXTURE_BIT | GL.GL_ENABLE_BIT
                | GL.GL_CURRENT_BIT);
            attribsPushed = true;
            gl.glDisable(GL.GL_TEXTURE_2D); // no textures
            // Sky is drawn behind everything: no depth test, no depth writes.
            gl.glDisable(GL.GL_DEPTH_TEST);
            gl.glDepthMask(false);
            // Widen the far clip so the whole dome fits inside the frustum.
            Matrix projection = Matrix.fromPerspective(view.getFieldOfView(),
                view.getViewport().getWidth(), view.getViewport().getHeight(),
                10e3, 2 * distToCenterOfPlanet + 10e3);
            double[] matrixArray = new double[16];
            projection.toArray(matrixArray, 0, false);
            gl.glMatrixMode(GL.GL_PROJECTION);
            gl.glPushMatrix();
            projectionPushed = true;
            gl.glLoadMatrixd(matrixArray, 0);
            gl.glMatrixMode(GL.GL_MODELVIEW);
            gl.glPushMatrix();
            modelviewPushed = true;
            // Sky transform
            Matrix skyTransform = computeSkyTransform(dc);
            Matrix modelView = view.getModelviewMatrix().multiply(skyTransform);
            modelView.toArray(matrixArray, 0, false);
            gl.glLoadMatrixd(matrixArray, 0);
            // Draw sky
            if (this.glListId != -1)
                gl.glCallList(this.glListId);
        }
        finally {
            // Restore GL state
            if (modelviewPushed)
            {
                gl.glMatrixMode(GL.GL_MODELVIEW);
                gl.glPopMatrix();
            }
            if (projectionPushed)
            {
                gl.glMatrixMode(GL.GL_PROJECTION);
                gl.glPopMatrix();
            }
            if (attribsPushed)
                gl.glPopAttrib();
        }
    }

    /**
     * Build sky dome and draw into a glList. Does nothing (and leaves
     * {@code glListId} unchanged) while no sun direction has been set.
     *
     * @param dc the current DrawContext
     * @param radius the sky dome radius in meters.
     * @param startLat the horizon latitude in decimal degrees.
     * @param endLat the zenith latitude in decimal degrees.
     * @param slices the number of longitude divisions used for the dome geometry.
     * @param stacks the number of latitude divisions used for the dome geometry.
     */
    protected void makeSkyDome(DrawContext dc, float radius, double startLat, double endLat,
        int slices, int stacks)
    {
        if (this.sunDirection == null)
            return;
        GL gl = dc.getGL();
        this.glListId = gl.glGenLists(1);
        gl.glNewList(this.glListId, GL.GL_COMPILE);
        this.drawSkyGradient(dc, radius, startLat, endLat, slices, stacks);
        gl.glEndList();
    }

    /**
     * Draws the sky dome as three bands of quad strips: a bottom fade below the
     * horizon, the main stacks, and (when the dome is capped) a top fade.
     * Per-vertex colors come from the atmospheric scattering computer; each
     * stack reuses the previous stack's colors for its bottom ring.
     *
     * @param dc the current DrawContext
     * @param radius the sky dome radius
     * @param startLat the horizon latitude
     * @param endLat the zenith latitude
     * @param slices the number of slices - vertical divisions
     * @param stacks the nuber os stacks - horizontal divisions
     */
    protected void drawSkyGradient(DrawContext dc, float radius, double startLat, double endLat,
        int slices, int stacks)
    {
        // Init atmospheric scattering computer
        if (this.asc == null)
            this.asc = new AtmosphericScatteringComputer(dc.getGlobe().getRadius(), this.thickness);
        // Get sky dome transform
        Matrix skyTransform = computeSkyTransform(dc);
        // GL setup
        GL gl = dc.getGL();
        gl.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA);
        gl.glEnable(GL.GL_BLEND);
        gl.glDisable(GL.GL_TEXTURE_2D);
        //gl.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_LINE); // wireframe
        double latitude, longitude, latitudeTop = endLat;
        double linear, linearTop, k, kTop;
        Color color;
        // Colors of the last emitted ring, carried into the next stack.
        Color[] stackColors = new Color[slices + 1];
        Vec4 eyePoint = dc.getView().getEyePoint();
        // bottom fade
        latitude = startLat - Math.max((endLat - startLat) / 4, 2);
        gl.glBegin(GL.GL_QUAD_STRIP);
        for (int slice = 0; slice <= slices; slice++)
        {
            longitude = 180 - ((float) slice / slices * (float) 360);
            Vec4 v1 = SphericalToCartesian(latitude, longitude, radius);
            Vec4 v2 = SphericalToCartesian(startLat, longitude, radius);
            color = this.asc.getAtmosphereColor(v2.transformBy4(skyTransform), eyePoint, this.sunDirection);
            // Lower vertex uses alpha 0 so the band fades out below the horizon.
            gl.glColor4f(color.getRed() / 255f, color.getGreen() / 255f, color.getBlue() / 255f, 0f);
            gl.glVertex3d(v1.getX(), v1.getY(), v1.getZ());
            gl.glColor4f(color.getRed() / 255f, color.getGreen() / 255f, color.getBlue() / 255f, color.getAlpha() / 255f);
            gl.glVertex3d(v2.getX(), v2.getY(), v2.getZ());
            stackColors[slice] = color;
        }
        gl.glEnd();
        // stacks and slices
        for (int stack = 1; stack < stacks - 1; stack++)
        {
            // bottom vertex
            // Cosine easing cubed concentrates rings near the horizon where the
            // color gradient changes fastest.
            linear = (float) (stack - 1) / (stacks - 1f);
            k = 1 - Math.cos(linear * Math.PI / 2);
            latitude = startLat + Math.pow(k, 3) * (endLat - startLat);
            // top vertex
            linearTop = (float) (stack) / (stacks - 1f);
            kTop = 1 - Math.cos(linearTop * Math.PI / 2);
            latitudeTop = startLat + Math.pow(kTop, 3) * (endLat - startLat);
            // Draw stack
            gl.glBegin(GL.GL_QUAD_STRIP);
            for (int slice = 0; slice <= slices; slice++)
            {
                longitude = 180 - ((float) slice / slices * (float) 360);
                Vec4 v = SphericalToCartesian(latitude, longitude, radius);
                // Bottom ring reuses the colors computed for the previous stack.
                color = stackColors[slice];
                gl.glColor4f(color.getRed() / 255f, color.getGreen() / 255f, color.getBlue() / 255f, color.getAlpha() / 255f);
                gl.glVertex3d(v.getX(), v.getY(), v.getZ());
                v = SphericalToCartesian(latitudeTop, longitude, radius);
                color = this.asc.getAtmosphereColor(v.transformBy4(skyTransform), eyePoint, this.sunDirection);
                gl.glColor4f(color.getRed() / 255f, color.getGreen() / 255f, color.getBlue() / 255f, color.getAlpha() / 255f);
                gl.glVertex3d(v.getX(), v.getY(), v.getZ());
                stackColors[slice] = color;
            }
            gl.glEnd();
        }
        // Top fade
        if (endLat < 90)
        {
            gl.glBegin(GL.GL_QUAD_STRIP);
            for (int slice = 0; slice <= slices; slice++) {
                longitude = 180 - ((float) slice / slices * (float) 360);
                Vec4 v = SphericalToCartesian(latitudeTop, longitude, radius);
                color = stackColors[slice];
                gl.glColor4f(color.getRed() / 255f, color.getGreen() / 255f, color.getBlue() / 255f, color.getAlpha() / 255f);
                gl.glVertex3d(v.getX(), v.getY(), v.getZ());
                v = SphericalToCartesian(endLat, longitude, radius);
                // Upper vertex fades to alpha 0 at the dome cap.
                gl.glColor4f(color.getRed() / 255f, color.getGreen() / 255f, color.getBlue() / 255f, 0);
                gl.glVertex3d(v.getX(), v.getY(), v.getZ());
            }
            gl.glEnd();
        }
        gl.glEnable(GL.GL_TEXTURE_2D);
        gl.glDisable(GL.GL_BLEND);
    }

    /**
     * Computes the transform placing the dome at the eye position, rotated so
     * the dome's polar axis points away from the globe surface.
     */
    protected Matrix computeSkyTransform(DrawContext dc)
    {
        Matrix transform = Matrix.IDENTITY;
        transform = transform.multiply(dc.getGlobe().computeModelCoordinateOriginTransform(dc.getView().getEyePosition()));
        transform = transform.multiply(Matrix.fromRotationX(Angle.POS90));
        return transform;
    }

    /**
     * Converts position in spherical coordinates (lat/lon/altitude)
     * to cartesian (XYZ) coordinates.
     *
     * @param latitude Latitude in decimal degrees
     * @param longitude Longitude in decimal degrees
     * @param radius Radius
     * @return the corresponding Point
     */
    protected static Vec4 SphericalToCartesian(double latitude, double longitude, double radius) {
        latitude *= Math.PI / 180.0f;
        longitude *= Math.PI / 180.0f;
        double radCosLat = radius * Math.cos(latitude);
        return new Vec4(
            radCosLat * Math.sin(longitude),
            radius * Math.sin(latitude),
            radCosLat * Math.cos(longitude));
    }

    /**
     * Frees the compiled display list, if any. Safe to call repeatedly;
     * silently does nothing when no GL context is current.
     */
    public void dispose()
    {
        if (this.glListId < 0)
            return;
        GLContext glc = GLContext.getCurrent();
        if (glc == null)
            return;
        glc.getGL().glDeleteLists(this.glListId, 1);
        this.glListId = -1;
    }

    @Override
    public String toString() {
        return Logging.getMessage("layers.Earth.SkyGradientLayer.Name");
    }
}
| |
package org.codehaus.mojo.versions;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import junit.framework.TestCase;
import org.codehaus.mojo.versions.rewriting.ModifiedPomXMLEventReader;
import org.codehaus.stax2.XMLInputFactory2;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLEventWriter;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.events.EndElement;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.Stack;
/**
* Basic tests for rewriting XML with a StAX (JSR-173) implementation.
*
* @author Stephen Connolly
*/
public class RewriteWithStAXTest
    extends TestCase
{
    /**
     * Demonstrates why a naive StAX read/write round-trip is NOT sufficient for
     * POM rewriting: the writer normalizes line endings, entities and namespace
     * output, so the result no longer matches the input byte-for-byte. This is
     * the motivation for {@code ModifiedPomXMLEventReader}, which edits the
     * original character buffer in place instead.
     *
     * @throws Exception on any XML processing failure (fails the test).
     */
    public void testBasic()
        throws Exception
    {
        // Input deliberately mixes \n, \r\n and \r line endings, a namespaced
        // element, and a raw '&' — all things a round-trip tends to normalize.
        String input = "<?xml version='1.0' encoding='utf-8'?>\n" + "<project>\n\r\n\r\n\r\n\r" + " <parent>\r\n" +
            " <groupId xmlns='foo'>org.codehaus.mojo</groupId>\n" +
            " <artifactId>mojo-&sandbox-parent</artifactId>\n" + " <version>5-SNAPSHOT</version>\r" +
            " </parent>\r" + "<build/></project>";
        byte[] rawInput = input.getBytes( "utf-8" );
        ByteArrayInputStream source = new ByteArrayInputStream( rawInput );
        ByteArrayOutputStream dest = new ByteArrayOutputStream();
        XMLInputFactory inputFactory = XMLInputFactory2.newInstance();
        // StAX2 (Woodstox) extension: keep exact source-location info per event.
        inputFactory.setProperty( XMLInputFactory2.P_PRESERVE_LOCATION, Boolean.TRUE );
        XMLOutputFactory outputFactory = XMLOutputFactory.newInstance();
        XMLEventReader eventReader = inputFactory.createXMLEventReader( source );
        XMLEventWriter eventWriter = outputFactory.createXMLEventWriter( dest, "utf-8" );
        // Pump every event straight from the reader into the writer.
        while ( eventReader.hasNext() )
        {
            eventWriter.add( eventReader.nextEvent() );
        }
        String output = new String( dest.toByteArray(), "utf-8" );
        // NOTE: asserts the round-trip is LOSSY. If this ever fails, the plain
        // StAX round-trip would be good enough and the rewriting reader moot.
        assertFalse( "StAX implementation is not good enough", input.equals( output ) );
    }
    /**
     * Replaces the text content of the {@code <artifactId>} element using
     * {@code ModifiedPomXMLEventReader}'s mark/replaceBetween mechanism, and
     * verifies that every other byte of the document (including the mixed line
     * endings) is preserved exactly.
     *
     * @throws Exception on any XML processing failure (fails the test).
     */
    public void testReplace()
        throws Exception
    {
        String input = "<?xml version='1.0' encoding='utf-8'?>\n" + "<project>\n\r\n\r\n\r\n\r" + " <parent>\r\n" +
            " <groupId xmlns='foo'>org.codehaus.mojo</groupId>\n" +
            " <artifactId>mojo-&sandbox-parent</artifactId>\n" + " <version>5-SNAPSHOT</version>\r" +
            " </parent>\r" + "<build/></project>";
        // Identical to input except for the artifactId body.
        String expected = "<?xml version='1.0' encoding='utf-8'?>\n" + "<project>\n\r\n\r\n\r\n\r" + " <parent>\r\n" +
            " <groupId xmlns='foo'>org.codehaus.mojo</groupId>\n" + " <artifactId>my-artifact</artifactId>\n" +
            " <version>5-SNAPSHOT</version>\r" + " </parent>\r" + "<build/></project>";
        // The reader mutates this buffer in place as replacements are made.
        StringBuilder output = new StringBuilder( input );
        XMLInputFactory inputFactory = XMLInputFactory2.newInstance();
        inputFactory.setProperty( XMLInputFactory2.P_PRESERVE_LOCATION, Boolean.TRUE );
        ModifiedPomXMLEventReader eventReader = new ModifiedPomXMLEventReader( output, inputFactory );
        while ( eventReader.hasNext() )
        {
            XMLEvent event = eventReader.nextEvent();
            // Mark 0 = start of <artifactId>, mark 1 = its end element; the
            // text between the two marks is then replaced.
            if ( event instanceof StartElement &&
                event.asStartElement().getName().getLocalPart().equals( "artifactId" ) )
            {
                eventReader.mark( 0 );
            }
            if ( event instanceof EndElement && event.asEndElement().getName().getLocalPart().equals( "artifactId" ) )
            {
                eventReader.mark( 1 );
                if ( eventReader.hasMark( 0 ) )
                {
                    eventReader.replaceBetween( 0, 1, "my-artifact" );
                }
            }
        }
        assertEquals( expected, output.toString() );
    }
    /**
     * Exercises two realistic rewriting passes over a full POM: first changes
     * {@code /project/parent/version} (located via an element-path stack), then
     * rewinds and bumps the version of the junit:junit dependency (located by
     * matching sibling groupId/artifactId values). Everything else must be
     * preserved byte-for-byte.
     *
     * @throws Exception on any XML processing failure (fails the test).
     */
    public void testReplaceFancy()
        throws Exception
    {
        String input =
            "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n" +
            " <modelVersion>4.0.0</modelVersion>\n" + "\n" + " <parent>\n" +
            " <groupId>org.codehaus.mojo</groupId>\n" + " <artifactId>mojo-sandbox-parent</artifactId>\n" +
            " <version>5-SNAPSHOT</version>\n" + " </parent>\n" + "\n" +
            " <groupId>org.codehaus.mojo</groupId>\n" + " <artifactId>versions-maven-plugin</artifactId>\n" +
            " <version>1.0.0-alpha-1-SNAPSHOT</version>\n" + " <packaging>maven-plugin</packaging>\n" + "\n" +
            " <name>Versions Maven Plugin</name>\n" + " <description>\n" +
            " Versions plugin for Maven 2. The versions plugin updates the versions of components in the pom.\n" +
            " </description>\n" + " <inceptionYear>2008</inceptionYear>\n" + " <licenses>\n" +
            " <license>\n" + " <name>The Apache Software License, Version 2.0</name>\n" +
            " <url>http://www.apache.org/licenses/LICENSE-2.0</url>\n" +
            " <distribution>repo</distribution>\n" + " </license>\n" + " </licenses>\n" + "\n" +
            " <scm>\n" +
            " <connection>scm:svn:http://svn.codehaus.org/mojo/trunk/sandbox/versions-maven-plugin</connection>\n" +
            " <developerConnection>scm:svn:https://svn.codehaus.org/mojo/trunk/sandbox/versions-maven-plugin</developerConnection>\n" +
            " <url>http://svn.codehaus.org/mojo/trunk/sandbox/versions-maven-plugin</url>\n" + " </scm>\n" +
            "\n" + " <developers>\n" + " <developer>\n" + " <name>Stephen Connolly</name>\n" +
            " <email>stephen.alan.connolly@gmail.com</email>\n" + " <roles>\n" +
            " <role>Java Developer</role>\n" + " </roles>\n" + " <timezone>0</timezone>\n" +
            " </developer>\n" + " </developers>\n" + "\n" + " <prerequisites>\n" +
            " <maven>2.0.6</maven>\n" + " </prerequisites>\n" + "\n" + " <dependencies>\n" +
            " <dependency>\n" + " <groupId>junit</groupId>\n" + " <artifactId>junit</artifactId>\n" +
            " <version>3.8.1</version>\n" + " <scope>test</scope>\n" + " </dependency>\n" +
            " <dependency>\n" + " <groupId>org.apache.maven</groupId>\n" +
            " <artifactId>maven-project</artifactId>\n" + " <version>2.0</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>org.apache.maven</groupId>\n" +
            " <artifactId>maven-settings</artifactId>\n" + " <version>2.0</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>org.apache.maven</groupId>\n" +
            " <artifactId>maven-plugin-api</artifactId>\n" + " <version>2.0</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>org.codehaus.plexus</groupId>\n" +
            " <artifactId>plexus-utils</artifactId>\n" + " <version>1.3</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>org.codehaus.plexus</groupId>\n" +
            " <artifactId>plexus-interactivity-api</artifactId>\n" + " <version>1.0-alpha-6</version>\n" +
            " <exclusions>\n" + " <exclusion>\n" + " <artifactId>plexus-utils</artifactId>\n" +
            " <groupId>plexus</groupId>\n" + " </exclusion>\n" + " </exclusions>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>javax.xml.stream</groupId>\n" +
            " <artifactId>stax-api</artifactId>\n" + " <version>1.0-2</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>stax</groupId>\n" +
            " <artifactId>stax</artifactId>\n" + " <version>1.1.1-dev</version>\n" +
            " </dependency>\n" + " </dependencies>\n" + "\n" + " <build>\n" + " <plugins>\n" +
            " <plugin>\n" + " <artifactId>maven-plugin-plugin</artifactId>\n" +
            " <version>2.3</version>\n" + " <configuration>\n" +
            " <goalPrefix>versions</goalPrefix>\n" + " </configuration>\n" + " </plugin>\n" +
            " </plugins>\n" + " </build>\n" + "\n" + "</project>";
        // Same document with parent version "5-SNAPSHOT" -> "4" and junit
        // version "3.8.1" -> "3.8.2"; everything else untouched.
        String expected =
            "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n" +
            " <modelVersion>4.0.0</modelVersion>\n" + "\n" + " <parent>\n" +
            " <groupId>org.codehaus.mojo</groupId>\n" + " <artifactId>mojo-sandbox-parent</artifactId>\n" +
            " <version>4</version>\n" + " </parent>\n" + "\n" + " <groupId>org.codehaus.mojo</groupId>\n" +
            " <artifactId>versions-maven-plugin</artifactId>\n" + " <version>1.0.0-alpha-1-SNAPSHOT</version>\n" +
            " <packaging>maven-plugin</packaging>\n" + "\n" + " <name>Versions Maven Plugin</name>\n" +
            " <description>\n" +
            " Versions plugin for Maven 2. The versions plugin updates the versions of components in the pom.\n" +
            " </description>\n" + " <inceptionYear>2008</inceptionYear>\n" + " <licenses>\n" +
            " <license>\n" + " <name>The Apache Software License, Version 2.0</name>\n" +
            " <url>http://www.apache.org/licenses/LICENSE-2.0</url>\n" +
            " <distribution>repo</distribution>\n" + " </license>\n" + " </licenses>\n" + "\n" +
            " <scm>\n" +
            " <connection>scm:svn:http://svn.codehaus.org/mojo/trunk/sandbox/versions-maven-plugin</connection>\n" +
            " <developerConnection>scm:svn:https://svn.codehaus.org/mojo/trunk/sandbox/versions-maven-plugin</developerConnection>\n" +
            " <url>http://svn.codehaus.org/mojo/trunk/sandbox/versions-maven-plugin</url>\n" + " </scm>\n" +
            "\n" + " <developers>\n" + " <developer>\n" + " <name>Stephen Connolly</name>\n" +
            " <email>stephen.alan.connolly@gmail.com</email>\n" + " <roles>\n" +
            " <role>Java Developer</role>\n" + " </roles>\n" + " <timezone>0</timezone>\n" +
            " </developer>\n" + " </developers>\n" + "\n" + " <prerequisites>\n" +
            " <maven>2.0.6</maven>\n" + " </prerequisites>\n" + "\n" + " <dependencies>\n" +
            " <dependency>\n" + " <groupId>junit</groupId>\n" + " <artifactId>junit</artifactId>\n" +
            " <version>3.8.2</version>\n" + " <scope>test</scope>\n" + " </dependency>\n" +
            " <dependency>\n" + " <groupId>org.apache.maven</groupId>\n" +
            " <artifactId>maven-project</artifactId>\n" + " <version>2.0</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>org.apache.maven</groupId>\n" +
            " <artifactId>maven-settings</artifactId>\n" + " <version>2.0</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>org.apache.maven</groupId>\n" +
            " <artifactId>maven-plugin-api</artifactId>\n" + " <version>2.0</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>org.codehaus.plexus</groupId>\n" +
            " <artifactId>plexus-utils</artifactId>\n" + " <version>1.3</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>org.codehaus.plexus</groupId>\n" +
            " <artifactId>plexus-interactivity-api</artifactId>\n" + " <version>1.0-alpha-6</version>\n" +
            " <exclusions>\n" + " <exclusion>\n" + " <artifactId>plexus-utils</artifactId>\n" +
            " <groupId>plexus</groupId>\n" + " </exclusion>\n" + " </exclusions>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>javax.xml.stream</groupId>\n" +
            " <artifactId>stax-api</artifactId>\n" + " <version>1.0-2</version>\n" +
            " </dependency>\n" + " <dependency>\n" + " <groupId>stax</groupId>\n" +
            " <artifactId>stax</artifactId>\n" + " <version>1.1.1-dev</version>\n" +
            " </dependency>\n" + " </dependencies>\n" + "\n" + " <build>\n" + " <plugins>\n" +
            " <plugin>\n" + " <artifactId>maven-plugin-plugin</artifactId>\n" +
            " <version>2.3</version>\n" + " <configuration>\n" +
            " <goalPrefix>versions</goalPrefix>\n" + " </configuration>\n" + " </plugin>\n" +
            " </plugins>\n" + " </build>\n" + "\n" + "</project>";
        StringBuilder output = new StringBuilder( input );
        XMLInputFactory inputFactory = XMLInputFactory2.newInstance();
        inputFactory.setProperty( XMLInputFactory2.P_PRESERVE_LOCATION, Boolean.TRUE );
        ModifiedPomXMLEventReader eventReader = new ModifiedPomXMLEventReader( output, inputFactory );
        // Pass 1: track the element path with a stack and rewrite the content
        // of /project/parent/version.
        Stack<String> stack = new Stack<String>();
        String path = "";
        while ( eventReader.hasNext() )
        {
            XMLEvent event = eventReader.nextEvent();
            if ( event.isStartElement() )
            {
                // Remember the parent path so it can be restored on end-element.
                stack.push( path );
                path += "/" + event.asStartElement().getName().getLocalPart();
                if ( "/project/parent/version".equals( path ) )
                {
                    eventReader.mark( 0 );
                }
            }
            if ( event.isEndElement() )
            {
                if ( "/project/parent/version".equals( path ) )
                {
                    eventReader.mark( 1 );
                    if ( eventReader.hasMark( 0 ) )
                    {
                        eventReader.replaceBetween( 0, 1, "4" );
                    }
                }
                path = stack.pop();
            }
        }
        // Pass 2: rewind over the (already modified) buffer and rewrite the
        // <version> of the dependency whose groupId AND artifactId are "junit".
        boolean inDependency = false;
        boolean groupIdMatches = false;
        boolean artifactIdMatches = false;
        eventReader.rewind();
        while ( eventReader.hasNext() )
        {
            XMLEvent event = eventReader.nextEvent();
            if ( event.isStartElement() )
            {
                String name = event.asStartElement().getName().getLocalPart();
                if ( inDependency )
                {
                    if ( "groupId".equals( name ) )
                    {
                        groupIdMatches = "junit".equals( eventReader.getElementText() );
                    }
                    else if ( "artifactId".equals( name ) )
                    {
                        artifactIdMatches = "junit".equals( eventReader.getElementText() );
                    }
                    else if ( "version".equals( name ) )
                    {
                        // Mark the candidate version; only applied if both
                        // coordinates end up matching by </dependency>.
                        eventReader.mark( 1 );
                    }
                }
                else if ( "dependency".equals( name ) )
                {
                    inDependency = true;
                    groupIdMatches = false;
                    artifactIdMatches = false;
                }
            }
            if ( event.isEndElement() )
            {
                String name = event.asEndElement().getName().getLocalPart();
                if ( inDependency )
                {
                    if ( "version".equals( name ) )
                    {
                        eventReader.mark( 2 );
                    }
                    else if ( "dependency".equals( name ) )
                    {
                        if ( groupIdMatches && artifactIdMatches && eventReader.hasMark( 1 ) &&
                            eventReader.hasMark( 2 ) )
                        {
                            eventReader.replaceBetween( 1, 2, "3.8.2" );
                        }
                        inDependency = false;
                    }
                }
            }
        }
        assertEquals( expected, output.toString() );
    }
}
| |
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Scanner;
public class NewElementSDsPair {
public static String reverse(String str){
char[] temp=new char[str.length()];
for(int i=str.length()-1;i>=0;i--){
if(str.charAt(i)=='N'){
temp[str.length()-1-i]='N';
}
else if(str.charAt(i)=='A'){
temp[str.length()-1-i]='T';
}
else if(str.charAt(i)=='C'){
temp[str.length()-1-i]='G';
}
else if(str.charAt(i)=='G'){
temp[str.length()-1-i]='C';
}
else if(str.charAt(i)=='T'){
temp[str.length()-1-i]='A';
}
else if(str.charAt(i)=='a'){
temp[str.length()-1-i]='t';
}
else if(str.charAt(i)=='c'){
temp[str.length()-1-i]='g';
}
else if(str.charAt(i)=='g'){
temp[str.length()-1-i]='c';
}
else if(str.charAt(i)=='t'){
temp[str.length()-1-i]='a';
}
else if(str.charAt(i)=='n'){
temp[str.length()-1-i]='n';
}
else {
// System.out.println("something wrong!");
}
}
return String.valueOf(temp);
}
public static String[] cigarAlignment(String seg1, String seg2, String cigar){
int start=0, length=0; int index1=0, index2=0, index=0, num;
for(int i=0;i<cigar.length();i++){
if(cigar.charAt(i)=='M'||cigar.charAt(i)=='I'||cigar.charAt(i)=='D'){
length=length+Integer.parseInt(cigar.substring(start, i)); start=i+1;
}
}
char[] str1=new char[length]; char[] str2=new char[length]; start=0;
for(int i=0;i<cigar.length();i++){
if(cigar.charAt(i)=='M'){
num=Integer.parseInt(cigar.substring(start, i)); start=i+1;
while(num>0){
num--;
while(seg1.charAt(index1)=='N'){
index1++;
}
str1[index]=seg1.charAt(index1); index1++;
while(seg2.charAt(index2)=='N'){
index2++;
}
str2[index]=seg2.charAt(index2); index2++; index++;
}
}
if(cigar.charAt(i)=='D'){
num=Integer.parseInt(cigar.substring(start, i)); start=i+1;
while(num>0){
num--;
while(seg1.charAt(index1)=='N'){
index1++;
}
str1[index]=seg1.charAt(index1); index1++;
str2[index]='-'; index++;
}
}
if(cigar.charAt(i)=='I'){
num=Integer.parseInt(cigar.substring(start, i)); start=i+1;
while(num>0){
num--;
str1[index]='-';
while(seg2.charAt(index2)=='N'){
index2++;
}
str2[index]=seg2.charAt(index2); index2++; index++;
}
}
}
String[] temp={String.valueOf(str1),String.valueOf(str2)};
return temp;
}
public static void computeIndex(String[] alignment, int[] seg1To2, int[] seg2To1, String seg1, String seg2, String str1, String str2){
int[][] temp=new int[2][alignment[0].length()]; int index;
if(str1.equalsIgnoreCase("+")){
index=0;
while(seg1.charAt(index)=='N'){
index++;
}
int p=0;
while(alignment[0].charAt(p)=='-'){
temp[0][p]=index; p++;
}
temp[0][p]=index;
for(int i=p+1;i<alignment[0].length();i++){
if(alignment[0].charAt(i)!='-'){
index++;
while(seg1.charAt(index)=='N'){
index++;
}
}
temp[0][i]=index;
}
}
else{
index=0;
while(seg1.charAt(index)=='N'){
index++;
}
int p=0;
while(alignment[0].charAt(p)=='-'){
temp[0][p]=seg1.length()-1-index; p++;
}
temp[0][p]=seg1.length()-1-index;
for(int i=p+1;i<alignment[0].length();i++){
if(alignment[0].charAt(i)!='-'){
index++;
while(seg1.charAt(index)=='N'){
index++;
}
}
temp[0][i]=seg1.length()-1-index;
}
}
if(str2.equalsIgnoreCase("+")){
index=0;
while(seg2.charAt(index)=='N'){
index++;
}
int p=0;
while(alignment[1].charAt(p)=='-'){
temp[1][p]=index; p++;
}
temp[1][p]=index;
for(int i=p+1;i<alignment[1].length();i++){
if(alignment[1].charAt(i)!='-'){
index++;
while(seg2.charAt(index)=='N'){
index++;
}
}
temp[1][i]=index;
}
}
else{
index=0;
while(seg2.charAt(index)=='N'){
index++;
}
int p=0;
while(alignment[1].charAt(p)=='-'){
temp[1][p]=seg2.length()-1-index; p++;
}
temp[1][p]=seg2.length()-1-index;
for(int i=p+1;i<alignment[1].length();i++){
if(alignment[1].charAt(i)!='-'){
index++;
while(seg2.charAt(index)=='N'){
index++;
}
}
temp[1][i]=seg2.length()-1-index;
}
}
for(int i=0;i<alignment[0].length();i++){
if(alignment[0].charAt(i)!='-'){
seg1To2[temp[0][i]]=temp[1][i];
}
if(alignment[1].charAt(i)!='-'){
seg2To1[temp[1][i]]=temp[0][i];
}
}
index=0; int start=index;
while(index<seg1To2.length&&seg1To2[index]==-1){
index++;
}
for(int i=start;i<index;i++){
seg1To2[i]=seg1To2[index];
}
index++; start=index;
while(index<seg1To2.length){
while(index<seg1To2.length&&seg1To2[index]==-1){
index++;
}
for(int i=start;i<index;i++){
seg1To2[i]=seg1To2[start-1];
}
index++; start=index;
}
index=0; start=index;
while(index<seg2To1.length&&seg2To1[index]==-1){
index++;
}
for(int i=start;i<index;i++){
seg2To1[i]=seg2To1[index];
}
index++; start=index;
while(index<seg2To1.length){
while(index<seg2To1.length&&seg2To1[index]==-1){
index++;
}
for(int i=start;i<index;i++){
seg2To1[i]=seg2To1[start-1];
}
index++; start=index;
}
}
public static boolean exist(String clusters2, String chr1, int[] seg1to2, int start, int end, int start1, int start2){
//clusters.get(otherChr), chroms.get(chr), seg1to2, pairs.get(chr).get(i)[0], pairs.get(chr).get(i)[1], start, otherStart
boolean exist=true; int num=0;
int index1=start; int index2=end-1;
while(index1!=index2){
index1++;
while(chr1.charAt(index1)=='N'){
index1++;
}
if(index1!=index2){
index2--;
while(chr1.charAt(index2)=='N'){
index2--;
}
}
}
if(clusters2.charAt(start2+seg1to2[index1-start1])=='1'){
return false;
}
else{
return true;
}
}
public static void main(String args[]){
try{
long startTime=System.currentTimeMillis();
ArrayList<String> chroms=new ArrayList<String>(); ArrayList<String> clusters=new ArrayList<String>();
ArrayList<String> chromsN=new ArrayList<String>();
Scanner inChr = new Scanner(new File(args[0]));
Scanner inChrN = new Scanner(new File(args[1]));
Scanner inCluster = new Scanner(new File(args[2]));
while(inChr.hasNextLine()){
inChr.nextLine(); chroms.add(inChr.nextLine());
inChrN.nextLine(); chromsN.add(inChrN.nextLine());
inCluster.nextLine();
clusters.add(inCluster.nextLine());
}
// System.out.println("end reading "+clusters.size());
inChr.close(); inCluster.close();
ArrayList<ArrayList<int[]>> pairs=new ArrayList<ArrayList<int[]>>();
for(int i=0;i<chroms.size();i++){
ArrayList<int[]> temp=new ArrayList<int[]>(); pairs.add(temp);
}
Scanner in = new Scanner(new File(args[3])); in.nextLine();
String[] onepair; int index=1;
while(in.hasNextLine()){
onepair=in.nextLine().trim().split("[\\p{Space}]+");
int[] temp={Integer.parseInt(onepair[5]), Integer.parseInt(onepair[6]), index};//note to revise
pairs.get(Integer.parseInt(onepair[1])).add(temp); index++;
}
in.close();
// System.out.println(index);
Scanner inL = new Scanner(new File(args[4]));
in=new Scanner(new File(args[5])); in.nextLine(); // SCN_LastzResult_500NonCR_NewExtendPec50.txt
String outdir = args[6];
BufferedWriter writerE = new BufferedWriter(new FileWriter(new File(outdir, "ElementSDs_pairwiseEqual.fasta")));
BufferedWriter writerU = new BufferedWriter(new FileWriter(new File(outdir, "ElementSDs_pairwiseUnequal.fasta")));
BufferedWriter writerLE = new BufferedWriter(new FileWriter(new File(outdir, "ElementSDs_pairwiseLengthEqual.fasta")));
BufferedWriter writerLU = new BufferedWriter(new FileWriter(new File(outdir, "ElementSDs_pairwiseLengthUequal.fasta")));
BufferedWriter writerDistri = new BufferedWriter(new FileWriter(new File(outdir, "ElementSDs_pairwiseDistri.fasta")));
BufferedWriter writer = new BufferedWriter(new FileWriter(new File(outdir, "ElementSDs_pairwiseSpecial_9.fasta")));
BufferedWriter writerDif = new BufferedWriter(new FileWriter(new File(outdir, "ElementSDs_pairwiseDifference.fasta")));
int[][] distri=new int[12][12];
for(int i=0;i<12;i++){
for(int j=0;j<12;j++){
distri[i][j]=0;
}
}
int chr, start, end, otherChr, otherStart, otherEnd, extendS1, extendE1, extendS2, extendE2;
int good=0, bad=0, wrong=0; index=-1; int index1,index2; String length;
ArrayList<Integer> num1=new ArrayList<Integer>(); ArrayList<Integer> num2=new ArrayList<Integer>();
ArrayList<Integer> origin1=new ArrayList<Integer>(); ArrayList<Integer> origin2=new ArrayList<Integer>();
String[] alignment=new String[2]; String[] alignS=new String[2]; String[] alignE=new String[2];
String seg1, seg2, segS1="", segS2="", segE1="", segE2="", cigarS="", cigarE=""; int miss=0;
while(in.hasNextLine()){
onepair=in.nextLine().trim().split("[\\p{Space}]+"); index++; length=inL.nextLine();
chr=Integer.parseInt(onepair[4]); start=Integer.parseInt(onepair[5]); end=Integer.parseInt(onepair[6]);
otherChr=Integer.parseInt(onepair[7]); otherStart=Integer.parseInt(onepair[8]); otherEnd=Integer.parseInt(onepair[9]);
extendS1=Integer.parseInt(onepair[16]); extendE1=Integer.parseInt(onepair[17]); cigarS=onepair[20];
extendS2=Integer.parseInt(onepair[18]); extendE2=Integer.parseInt(onepair[19]); cigarE=onepair[21];
if(onepair[14].equalsIgnoreCase("+")){
seg1=chroms.get(chr).substring(start, end);
if(!cigarS.equalsIgnoreCase("-1")){
segS1=chromsN.get(chr).substring(extendS1, start).toUpperCase();
start=extendS1;
}
if(!cigarE.equalsIgnoreCase("-1")){
segE1=chromsN.get(chr).substring(end, extendE1).toUpperCase();
end=extendE1;
}
}
else{
seg1=reverse(chroms.get(chr).substring(start, end));
if(!cigarS.equalsIgnoreCase("-1")){
segS1=reverse(chromsN.get(chr).substring(end, extendE1).toUpperCase());
end=extendE1;
}
if(!cigarE.equalsIgnoreCase("-1")){
segE1=reverse(chromsN.get(chr).substring(extendS1, start).toUpperCase());
start=extendS1;
}
}
if(onepair[15].equalsIgnoreCase("+")){
seg2=chroms.get(otherChr).substring(otherStart, otherEnd);
if(!cigarS.equalsIgnoreCase("-1")){
segS2=chromsN.get(otherChr).substring(extendS2, otherStart).toUpperCase();
otherStart=extendS2;
}
if(!cigarE.equalsIgnoreCase("-1")){
segE2=chromsN.get(otherChr).substring(otherEnd, extendE2).toUpperCase();
otherEnd=extendE2;
}
}
else{
seg2=reverse(chroms.get(otherChr).substring(otherStart, otherEnd));
if(!cigarS.equalsIgnoreCase("-1")){
segS2=reverse(chromsN.get(otherChr).substring(otherEnd, extendE2).toUpperCase());
otherEnd=extendE2;
}
if(!cigarE.equalsIgnoreCase("-1")){
segE2=reverse(chromsN.get(otherChr).substring(extendS2, otherStart).toUpperCase());
otherStart=extendS2;
}
}
alignment=cigarAlignment(seg1,seg2,onepair[1]);
if(!cigarS.equalsIgnoreCase("-1")){
alignS=cigarAlignment(segS1,segS2,cigarS);
alignment[0]=alignS[0]+alignment[0];
alignment[1]=alignS[1]+alignment[1];
seg1=segS1+seg1; seg2=segS2+seg2;
}
if(!cigarE.equalsIgnoreCase("-1")){
alignE=cigarAlignment(segE1,segE2,cigarE);
alignment[0]=alignment[0]+alignE[0];
alignment[1]=alignment[1]+alignE[1];
seg1=seg1+segE1; seg2=seg2+segE2;
}
int[] seg1to2=new int[seg1.length()]; int[] seg2to1=new int[seg2.length()];
for(int i=0;i<seg1.length();i++){
seg1to2[i]=-1;
}
for(int i=0;i<seg2.length();i++){
seg2to1[i]=-1;
}
computeIndex(alignment, seg1to2, seg2to1, seg1, seg2, onepair[14], onepair[15]);
// System.out.println(index+" test 1");
num1.clear(); num2.clear(); origin1.clear(); origin2.clear();
for(int i=0;i<pairs.get(chr).size();i++){
if(pairs.get(chr).get(i)[0]>=start&&pairs.get(chr).get(i)[1]<=end){
origin1.add(pairs.get(chr).get(i)[2]);
if(exist(clusters.get(otherChr), chroms.get(chr), seg1to2, pairs.get(chr).get(i)[0], pairs.get(chr).get(i)[1], start, otherStart)){
num1.add(pairs.get(chr).get(i)[2]);
}
}
}
for(int i=0;i<pairs.get(otherChr).size();i++){
if(pairs.get(otherChr).get(i)[0]>=otherStart&&pairs.get(otherChr).get(i)[1]<=otherEnd){
origin2.add(pairs.get(otherChr).get(i)[2]);
if(exist(clusters.get(chr), chroms.get(otherChr), seg2to1, pairs.get(otherChr).get(i)[0], pairs.get(otherChr).get(i)[1], otherStart, start)){
num2.add(pairs.get(otherChr).get(i)[2]);
}
}
}
if(origin1.size()==0||origin2.size()==0){
miss++;
}
// System.out.println(index+" "+num1+" "+num2);
if(num1.size()==0||num2.size()==0){
wrong++;
if(origin1.size()>0&&origin2.size()>0){
writer.write(index+": ");
if(onepair[14].equalsIgnoreCase("+")){
for(int i=0;i<origin1.size();i++){
writer.write(origin1.get(i)+" ");
}
}
else{
for(int i=origin1.size()-1;i>=0;i--){
writer.write(origin1.get(i)+" ");
}
}
writer.write("-- ");
if(onepair[15].equalsIgnoreCase("+")){
for(int i=0;i<origin2.size();i++){
writer.write(origin2.get(i)+" ");
}
}
else{
for(int i=origin2.size()-1;i>=0;i--){
writer.write(origin2.get(i)+" ");
}
}
writer.newLine(); writer.newLine();
}
}
else{
writerDif.write(index+" "+origin1.size()+" "+origin2.size()+" "+num1.size()+" "+num2.size()); writerDif.newLine();
if(num1.size()==num2.size()){
good++;
writerE.write(index+": ");
if(onepair[14].equalsIgnoreCase("+")){
for(int i=0;i<num1.size();i++){
writerE.write(num1.get(i)+" ");
}
}
else{
for(int i=num1.size()-1;i>=0;i--){
writerE.write("-"+num1.get(i)+" ");
}
}
writerE.write("-- ");
if(onepair[15].equalsIgnoreCase("+")){
for(int i=0;i<num2.size();i++){
writerE.write(num2.get(i)+" ");
}
}
else{
for(int i=num2.size()-1;i>=0;i--){
writerE.write("-"+num2.get(i)+" ");
}
}
writerE.newLine(); writerE.newLine();
writerLE.write(length); writerLE.newLine();
}
else{
bad++;
writerU.write(index+": ");
if(onepair[14].equalsIgnoreCase("+")){
for(int i=0;i<num1.size();i++){
writerU.write(num1.get(i)+" ");
}
}
else{
for(int i=num1.size()-1;i>=0;i--){
writerU.write(num1.get(i)+" ");
}
}
writerU.write("-- ");
if(onepair[15].equalsIgnoreCase("+")){
for(int i=0;i<num2.size();i++){
writerU.write(num2.get(i)+" ");
}
}
else{
for(int i=num2.size()-1;i>=0;i--){
writerU.write(num2.get(i)+" ");
}
}
writerU.newLine(); writerU.newLine();
writerLU.write(length); writerLU.newLine();
}
if(num1.size()<=10){
index1=num1.size()-1;
}
else if(num1.size()<=20){
index1=10;
}
else{
index1=11;
}
if(num2.size()<=10){
index2=num2.size()-1;
}
else if(num2.size()<=20){
index2=10;
}
else{
index2=11;
}
distri[index1][index2]++;
}
// System.out.println(index);
}
for(int i=0;i<12;i++){
for(int j=0;j<12;j++){
writerDistri.write(distri[i][j]+" ");
}
writerDistri.newLine();
}
long endTime=System.currentTimeMillis();
writerE.close();writerU.close(); in.close(); writerDistri.close(); writerLE.close(); writerLU.close(); writer.close(); writerDif.close();
// System.out.println("end! "+index+" "+good+" "+bad+" "+wrong+" "+miss);
// System.out.println("runTime: "+(endTime-startTime));
}catch (FileNotFoundException e) {
e.printStackTrace();
}
catch(Exception e){
}
}
}
| |
package com.xiyili.fontawesome;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.text.TextPaint;
import android.util.DisplayMetrics;
/**
* Created by banxi on 14-5-20.
*
*/
/**
 * A {@link Drawable} that renders a single FontAwesome glyph at one of three
 * canonical icon sizes (small / action-bar / notification), scaled by the
 * display density. The glyph, size type and paint live in a shared
 * {@link FontState} so the drawable participates in the Drawable
 * ConstantState caching contract.
 */
public class FontDrawable extends Drawable {
    private static final int DEFAULT_PAINT_FLAGS = Paint.ANTI_ALIAS_FLAG|Paint.LINEAR_TEXT_FLAG
            |Paint.SUBPIXEL_TEXT_FLAG;
    /** The glyph character to draw; null draws nothing. */
    private final Character mChar;
    /** Visible glyph size in px (smaller than the full asset bounds). */
    private int mOpticalSize;
    /** Full asset (intrinsic) size in px. */
    private int mFullassetSize;
    private int mType = ICON_TYPE_SMALL;
    private float mTargetDensity;
    public static final int ICON_TYPE_SMALL = 1;
    public static final int ICON_TYPE_ACTION_BAR = 2;
    public static final int ICON_TYPE_NOTIFICATION = 3;
    /** Lazily loaded, shared FontAwesome typeface (from assets/FontAwesome.otf). */
    public static Typeface typeface = null;
    /** Optional background fill; null means no background. */
    private Paint mBgPaint;
    private FontState mFontState = null;

    /**
     * Creates a drawable for the given glyph at the small icon size.
     *
     * @param res resources used for the display density and the font asset.
     * @param ch  glyph to render (may be null, which renders nothing).
     */
    public FontDrawable(Resources res, Character ch) {
        mChar = ch;
        DisplayMetrics metrics = res.getDisplayMetrics();
        mTargetDensity = metrics.density;
        mFontState = new FontState(ch, ICON_TYPE_SMALL, mTargetDensity);
        if (typeface == null) {
            typeface = Typeface.createFromAsset(res.getAssets(), "FontAwesome.otf");
        }
        mFontState.mPaint.setTypeface(typeface);
        mFontState.mPaint.setTextAlign(Paint.Align.CENTER);
        updateSize();
        adjustFontSize();
        // setBackgroundColor(Color.RED);
    }

    /**
     * Recomputes optical/full-asset pixel sizes from the icon type and density.
     * The dip values follow the standard icon metrics for each type.
     */
    private void updateSize() {
        int fullAssetDip = 16;
        int opticalDip = 12;
        switch (mType) {
            case ICON_TYPE_SMALL:
                fullAssetDip = 16;
                opticalDip = 12;
                break;
            case ICON_TYPE_ACTION_BAR:
                fullAssetDip = 32;
                opticalDip = 24;
                break;
            case ICON_TYPE_NOTIFICATION:
                fullAssetDip = 24;
                opticalDip = 22;
                break;
        }
        // dip -> px with round-half-up.
        mOpticalSize = (int) (opticalDip* mTargetDensity +0.5);
        mFullassetSize = (int) (fullAssetDip * mTargetDensity + 0.5);
        invalidateSelf();
    }

    /**
     * Grows the text size (from 8px, 1px steps) until the glyph's measured
     * width fills the optical size.
     */
    private void adjustFontSize() {
        float fontSize = 8;
        float width = 0;
        while (width < mOpticalSize) {
            mFontState.mPaint.setTextSize(fontSize);
            width = mFontState.mPaint.measureText(String.valueOf(mChar));
            fontSize++;
        }
    }

    @Override
    public void draw(Canvas canvas) {
        if (mChar == null) {
            return;
        }
        final TextPaint paint = mFontState.mPaint;
        // Center the glyph horizontally and vertically within the full asset box.
        float x = mFullassetSize*0.5f;
        float y = mFullassetSize * 0.5f - ((paint.descent() + paint.ascent()) * 0.5f);
        if (mBgPaint != null) {
            canvas.drawRect(0,0,mFullassetSize,mFullassetSize,mBgPaint);
        }
        canvas.drawText(String.valueOf(mChar),x,y,mFontState.mPaint);
    }

    /**
     * Sets the icon type (one of the ICON_TYPE_* constants) and resizes.
     *
     * @return this drawable, for chaining.
     */
    public FontDrawable setType(int type) {
        mType = type;
        updateSize();
        adjustFontSize();
        return this;
    }

    /** Sets the glyph color. @return this drawable, for chaining. */
    public FontDrawable setColor(int color) {
        mFontState.mPaint.setColor(color);
        return this;
    }

    /** Sets an opaque background fill color. @return this drawable, for chaining. */
    public FontDrawable setBackgroundColor(int color) {
        if (mBgPaint == null) {
            mBgPaint = new Paint();
        }
        mBgPaint.setColor(color);
        return this;
    }

    /** @return the current glyph color. */
    public int getColor() {
        return mFontState.mPaint.getColor();
    }

    @Override
    public boolean getPadding(Rect padding) {
        // Pad by the optical inset on every side.
        int px = mFullassetSize - mOpticalSize;
        if (px > 0) {
            padding.set(px, px, px, px);
            return true;
        }
        return false;
    }

    @Override
    public int getIntrinsicWidth() {
        return mFullassetSize;
    }

    @Override
    public int getIntrinsicHeight() {
        return mFullassetSize;
    }

    @Override
    public void setAlpha(int alpha) {
        int oldAlpha = mFontState.mPaint.getAlpha();
        if (oldAlpha != alpha) {
            mFontState.mPaint.setAlpha(alpha);
            invalidateSelf();
        }
    }

    @Override
    public int getAlpha() {
        return mFontState.mPaint.getAlpha();
    }

    @Override
    public void setColorFilter(ColorFilter cf) {
        mFontState.mPaint.setColorFilter(cf);
        invalidateSelf();
    }

    @Override
    public int getOpacity() {
        // NOTE(review): reports OPAQUE even though the glyph paint can carry
        // alpha — confirm whether TRANSLUCENT would be more accurate.
        return PixelFormat.OPAQUE;
    }

    @Override
    public ConstantState getConstantState() {
        if (mFontState != null) {
            mFontState.mChangingConfiguration = getChangingConfigurations();
        }
        return mFontState;
    }

    /** Shared state: glyph, icon type, density and the text paint. */
    final static class FontState extends ConstantState{
        Character mChar;
        int mType;
        float mTargetDensity;
        TextPaint mPaint = new TextPaint(DEFAULT_PAINT_FLAGS);
        int mChangingConfiguration;
        @Override
        public Drawable newDrawable() {
            return new FontDrawable(this,null);
        }
        @Override
        public Drawable newDrawable(Resources res) {
            return new FontDrawable(res, mChar);
        }
        FontState(Character character, int type, float density) {
            mChar = character;
            mType = type;
            mTargetDensity = density;
        }
        /** Copy constructor used when cloning a drawable from shared state. */
        FontState(FontState fontState) {
            mChangingConfiguration = fontState.getChangingConfigurations();
            mType = fontState.mType;
            mChar = fontState.mChar;
            mTargetDensity = fontState.mTargetDensity;
        }
        @Override
        public int getChangingConfigurations() {
            return mChangingConfiguration;
        }
    }

    /**
     * Constructor used by {@link FontState#newDrawable()}.
     *
     * BUG FIX: the previous revision never initialized {@code mFontState}
     * here, so the {@code setType -> adjustFontSize} call below dereferenced
     * a null {@code mFontState.mPaint} and threw a NullPointerException on
     * every {@code ConstantState.newDrawable()} call. We now clone the state
     * via the (previously unused) copy constructor and re-apply the shared
     * typeface and centered alignment to the fresh paint.
     */
    private FontDrawable(FontState state,Resources res) {
        mFontState = new FontState(state);
        if (res != null) {
            mTargetDensity = res.getDisplayMetrics().density;
        }else{
            mTargetDensity = state.mTargetDensity;
        }
        mFontState.mTargetDensity = mTargetDensity;
        mChar = state.mChar;
        if (typeface != null) {
            mFontState.mPaint.setTypeface(typeface);
        }
        mFontState.mPaint.setTextAlign(Paint.Align.CENTER);
        setType(state.mType);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.